diff --git a/README.md b/README.md index c6224640..1b358f00 100644 --- a/README.md +++ b/README.md @@ -54,7 +54,7 @@ See README.md in [haskell_tool](./haskell_tool/). * Cornelius Diekmann, Lukas Schwaighofer, and Georg Carle. *Certifying spoofing-protection of firewalls.* In 11th International Conference on Network and Service Management, CNSM, Barcelona, Spain, November 2015. [[preprint]](http://www.net.in.tum.de/fileadmin/bibtex/publications/papers/diekmann2015_cnsm.pdf), [[ieee | paywall]](http://ieeexplore.ieee.org/document/7367354/) * Cornelius Diekmann, Lars Hupel, and Georg Carle. *Semantics-Preserving Simplification of Real-World Firewall Rule Sets.* In 20th International Symposium on Formal Methods, June 2015. [[preprint]](http://www.net.in.tum.de/fileadmin/bibtex/publications/papers/fm15_Semantics-Preserving_Simplification_of_Real-World_Firewall_Rule_Sets.pdf), [[springer | paywall]](http://link.springer.com/chapter/10.1007%2F978-3-319-19249-9_13) -The raw data of the iptables rulesets from the Examples is stored in [this](https://github.com/diekmann/net-network) repositoy. +The raw data of the iptables rulesets from the Examples is stored in [this](https://github.com/diekmann/net-network) repository. --- diff --git a/haskell_tool/README.md b/haskell_tool/README.md index 38eb117d..a367a995 100644 --- a/haskell_tool/README.md +++ b/haskell_tool/README.md @@ -57,7 +57,7 @@ Available options: --routingtbl STRING Optional path to a routing table. --table STRING The table to load for analysis. Default: `filter`. Note: This tool does not support packet modification, - so loading tables such as `nat` will most likeley + so loading tables such as `nat` will most likely fail. --chain STRING The chain to start the analysis. Default: `FORWARD`. Use `INPUT` for a host-based firewall. diff --git a/haskell_tool/lib/Data_Bits.README b/haskell_tool/lib/Data_Bits.README index f8fde8c1..2294078a 100644 --- a/haskell_tool/lib/Data_Bits.README +++ b/haskell_tool/lib/Data_Bits.README @@ -1,3 +1,3 @@ -Automaticalyy generated by AFP Native_Word entry!! +Automatically generated by AFP Native_Word entry!! Do not edit! Regenerate each time! diff --git a/haskell_tool/lib/Network/IPTables/Analysis.hs b/haskell_tool/lib/Network/IPTables/Analysis.hs index 59c1ada2..41984b0b 100644 --- a/haskell_tool/lib/Network/IPTables/Analysis.hs +++ b/haskell_tool/lib/Network/IPTables/Analysis.hs @@ -58,7 +58,7 @@ certifySpoofingProtection_generic (IsabelleIpAssmt a -> [Isabelle.Rule (Isabelle.Common_primitive a)] -> [String]) -> IsabelleIpAssmt a -> [Isabelle.Rule (Isabelle.Common_primitive a)] -> ([String], [(Isabelle.Iface, Bool)]) certifySpoofingProtection_generic debug_ipassmt_f ipassmt rs = (warn_defined ++ debug_ipassmt, certResult) - where -- fuc: firewall under certification, prepocessed + where -- fuc: firewall under certification, preprocessed -- no_spoofing_executable_set requires normalized_nnf_match. Isabelle.upper_closure guarantees this. -- It also guarantees that if we start from a simple_ruleset, it remains a simple ruleset. -- Theorem: no_spoofing_executable_set_preprocessed diff --git a/haskell_tool/lib/Network/IPTables/Main.hs b/haskell_tool/lib/Network/IPTables/Main.hs index 40194fb9..fb9ae276 100644 --- a/haskell_tool/lib/Network/IPTables/Main.hs +++ b/haskell_tool/lib/Network/IPTables/Main.hs @@ -45,7 +45,7 @@ data CommandLineArgsLabeled = CommandLineArgsLabeled { verbose :: Bool "Show verbose debug output (for example, of the parser)." 
, ipassmt :: Maybe FilePath "Optional path to an IP assignment file. If not specified, it only loads `lo = [127.0.0.0/8]`." , routingtbl :: Maybe FilePath "Optional path to a routing table." - , table :: Maybe String "The table to load for analysis. Default: `filter`. Note: This tool does not support packet modification, so loading tables such as `nat` will most likeley fail." + , table :: Maybe String "The table to load for analysis. Default: `filter`. Note: This tool does not support packet modification, so loading tables such as `nat` will most likely fail." , chain :: Maybe String "The chain to start the analysis. Default: `FORWARD`. Use `INPUT` for a host-based firewall." --TODO: we need some grouping for specific options for the analysis -- For example ./fffuu --analysis service-matrix --sport 424242 --analysis spoofing --foo @@ -56,7 +56,7 @@ data CommandLineArgsLabeled = CommandLineArgsLabeled instance ParseRecord CommandLineArgsLabeled --- unlabeld, mandatory command line arguments. For example: ./fffuu "path/to/iptables-save" +-- unlabeled, mandatory command line arguments. For example: ./fffuu "path/to/iptables-save" -- http://stackoverflow.com/questions/36375556/haskell-unnamed-command-line-arguments-for-optparse-generic/36382477#36382477 data CommandLineArgsUnlabeled = CommandLineArgsUnlabeled (FilePath "Required: Path to `iptables-save` output. This is the input for this tool.") @@ -133,7 +133,7 @@ main' ops = do \It abstracts over unknown matches. \ \By default, it does an overapproximation, i.e. it loads a more permissive version of the ruleset. \ \Then it runs a number of analysis. \ - \Overapproximation means: if the anaylsis concludes that the packets you want to be dropped are dropped in the loaded overapproximation, then they are also dropped for your real firewall (without approximation)." + \Overapproximation means: if the analysis concludes that the packets you want to be dropped are dropped in the loaded overapproximation, then they are also dropped for your real firewall (without approximation)." --print (cmdArgs::CommandLineArgs) (verbose, ipassmt, rtbl, table, chain, smOptions, (srcname, src)) <- readArgs ops cmdArgs diff --git a/haskell_tool/lib/Network/IPTables/Ruleset.hs b/haskell_tool/lib/Network/IPTables/Ruleset.hs index 7d991964..287496bb 100644 --- a/haskell_tool/lib/Network/IPTables/Ruleset.hs +++ b/haskell_tool/lib/Network/IPTables/Ruleset.hs @@ -156,7 +156,7 @@ filter_Isabelle_Action ps = case fAction ps of [] -> Isabelle.Empty --- this is just DEBUGING +-- this is just DEBUGGING -- tries to catch errors of rulesetLookup checkParsedTables :: Isabelle.Len a => Ruleset a -> IO () checkParsedTables res = mapM_ check tables @@ -168,7 +168,7 @@ checkParsedTables res = mapM_ check tables errormsg t msg = concat ["Table `", t ,"' caught exception: `" , msg , "'. Analysis not possible for this table. " - , "This is probably due to unsupportd actions " + , "This is probably due to unsupported actions " , "(or a bug in the parser)."] success t chain = concat ["Parsed ", show (length chain) , " chains in table ", t, ", a total of " diff --git a/haskell_tool/test/Suites/GoldenFiles/help b/haskell_tool/test/Suites/GoldenFiles/help index 5b9468b5..199b3885 100644 --- a/haskell_tool/test/Suites/GoldenFiles/help +++ b/haskell_tool/test/Suites/GoldenFiles/help @@ -4,7 +4,7 @@ and loads one table and chain. By default, it loads the `filter` table and the where only `ACCEPT` and `DROP` actions remain. It abstracts over unknown matches. 
By default, it does an overapproximation, i.e. it loads a more permissive version of the ruleset. Then it runs a number of analysis. -Overapproximation means: if the anaylsis concludes that the packets you want to +Overapproximation means: if the analysis concludes that the packets you want to be dropped are dropped in the loaded overapproximation, then they are also dropped for your real firewall (without approximation). @@ -21,7 +21,7 @@ Available options: --routingtbl STRING Optional path to a routing table. --table STRING The table to load for analysis. Default: `filter`. Note: This tool does not support packet modification, - so loading tables such as `nat` will most likeley + so loading tables such as `nat` will most likely fail. --chain STRING The chain to start the analysis. Default: `FORWARD`. Use `INPUT` for a host-based firewall. diff --git a/haskell_tool/test/Suites/GoldenFiles/i8_iptables-save-2015-05-15_15-23-41_cheating b/haskell_tool/test/Suites/GoldenFiles/i8_iptables-save-2015-05-15_15-23-41_cheating index 8ccc70ef..a6cc13c4 100644 --- a/haskell_tool/test/Suites/GoldenFiles/i8_iptables-save-2015-05-15_15-23-41_cheating +++ b/haskell_tool/test/Suites/GoldenFiles/i8_iptables-save-2015-05-15_15-23-41_cheating @@ -2,8 +2,8 @@ Parsed IpAssmt IpAssmt [(eth0,Pos [0.0.0.0-255.255.255.255]),(foo,Pos []),(eth1.96,Pos [131.159.14.3/25]),(eth1.108,Pos [131.159.14.129/26]),(eth1.109,Pos [131.159.20.11/24]),(eth1.110,Neg [131.159.14.0/23,131.159.20.0/23,192.168.212.0/23,188.95.233.0/24,188.95.232.192/27,188.95.234.0/23,192.48.107.0/24,188.95.236.0/22,185.86.232.0/22]),(eth1.116,Pos [131.159.15.131/26]),(eth1.152,Pos [131.159.15.252/28]),(eth1.171,Pos [131.159.15.2/26]),(eth1.173,Pos [131.159.21.252/24]),(eth1.1010,Pos [131.159.15.227/28]),(eth1.1011,Pos [131.159.14.194/27]),(eth1.1012,Pos [131.159.14.238/28]),(eth1.1014,Pos [131.159.15.217/27]),(eth1.1016,Pos [131.159.15.66/26]),(eth1.1017,Pos [131.159.14.242/28]),(eth1.1111,Pos [192.168.212.4/24]),(eth1.97,Pos [188.95.233.2/24]),(eth1.1019,Pos [188.95.234.2/23]),(eth1.1020,Pos [192.48.107.2/24]),(eth1.1023,Pos [188.95.236.2/22]),(eth1.1025,Pos [185.86.232.2/22]),(eth1.1024,Neg [131.159.14.0/23,131.159.20.0/23,192.168.212.0/23,188.95.233.0/24,188.95.232.192/27,188.95.234.0/23,192.48.107.0/24,188.95.236.0/22,185.86.232.0/22])] == Checking which tables are supported for analysis. Usually, only `filter'. == Parsed 90 chains in table filter, a total of 4813 rules -Table `nat' caught exception: `Reading ruleset failed! sanity_wf_ruleset check failed.'. Analysis not possible for this table. This is probably due to unsupportd actions (or a bug in the parser). -Table `raw' caught exception: `Reading ruleset failed! sanity_wf_ruleset check failed.'. Analysis not possible for this table. This is probably due to unsupportd actions (or a bug in the parser). +Table `nat' caught exception: `Reading ruleset failed! sanity_wf_ruleset check failed.'. Analysis not possible for this table. This is probably due to unsupported actions (or a bug in the parser). +Table `raw' caught exception: `Reading ruleset failed! sanity_wf_ruleset check failed.'. Analysis not possible for this table. This is probably due to unsupported actions (or a bug in the parser). 
== Transformed to Isabelle type (only filter table) == == unfolded FORWARD chain (upper closure) == (-m state --state ESTABLISHED,RELATED,UNTRACKED, -j ACCEPT) diff --git a/haskell_tool/test/Suites/GoldenFiles/sqrl_2015_aug_iptables-save-spoofing-protection b/haskell_tool/test/Suites/GoldenFiles/sqrl_2015_aug_iptables-save-spoofing-protection index 8d25dc2c..0baec188 100644 --- a/haskell_tool/test/Suites/GoldenFiles/sqrl_2015_aug_iptables-save-spoofing-protection +++ b/haskell_tool/test/Suites/GoldenFiles/sqrl_2015_aug_iptables-save-spoofing-protection @@ -134,8 +134,8 @@ INFO: Officially, we only support the filter table. You requested the `raw' tabl INFO: Officially, we only support the chains FORWARD, INPUT, OUTPUT. You requested the `PREROUTING' chain. Let's see what happens ;-) == Checking which tables are supported for analysis. Usually, only `filter'. == Parsed 7 chains in table filter, a total of 53 rules -Table `mangle' caught exception: `Reading ruleset failed! sanity_wf_ruleset check failed.'. Analysis not possible for this table. This is probably due to unsupportd actions (or a bug in the parser). -Table `nat' caught exception: `Reading ruleset failed! sanity_wf_ruleset check failed.'. Analysis not possible for this table. This is probably due to unsupportd actions (or a bug in the parser). +Table `mangle' caught exception: `Reading ruleset failed! sanity_wf_ruleset check failed.'. Analysis not possible for this table. This is probably due to unsupported actions (or a bug in the parser). +Table `nat' caught exception: `Reading ruleset failed! sanity_wf_ruleset check failed.'. Analysis not possible for this table. This is probably due to unsupported actions (or a bug in the parser). Parsed 3 chains in table raw, a total of 12 rules == Transformed to Isabelle type (only raw table) == [("OUTPUT",[]),("PREROUTING",[(-i lup, -j lupSpoofProtect (call)),(! -s 10.13.42.136/29 -i ldit, -j DROP),(! -s 10.13.42.128/29 -i lmd, -j DROP),(! -s 10.13.42.144/28 -i loben, -j DROP),(! -s 10.13.42.176/28 -i wg, -j DROP),(! 
-s 10.13.42.160/28 -i wt, -j DROP)]),("lupSpoofProtect",[(-s 192.168.0.0/16, -j DROP),(-s 10.0.0.0/8, -j DROP),(-s 172.16.0.0/12, -j DROP),(-d 192.168.0.0/16, -j DROP),(-d 10.0.0.0/8, -j DROP),(-d 172.16.0.0/12, -j DROP)])] diff --git a/haskell_tool/test/Suites/Parser.hs b/haskell_tool/test/Suites/Parser.hs index 78f39e70..aa66debc 100644 --- a/haskell_tool/test/Suites/Parser.hs +++ b/haskell_tool/test/Suites/Parser.hs @@ -10,7 +10,7 @@ import Test.Tasty import Test.Tasty.HUnit as HU --- we always have "-m protocolid:0" (note the 0) because we havn't filled the protocol yet +-- we always have "-m protocolid:0" (note the 0) because we haven't filled the protocol yet expected_result = "*filter\n\ \:DOS~Pro-t_ect - [0:0]\n\ \:FORWARD DROP [0:0]\n\ @@ -323,7 +323,7 @@ test_service_matrix ipassmtMaybeString fileName expected_result errormsg = do test_topoS_generated_service_matrix = HU.testCase "test_topoS_generated_service_matrix" $ test_service_matrix Nothing rulesetFile expected_result errormsg - where rulesetFile = "../thy/Iptables_Semantics/Examples/topoS_generated/imaginray_factory_network.iptables-save.by-linux-kernel" + where rulesetFile = "../thy/Iptables_Semantics/Examples/topoS_generated/imaginary_factory_network.iptables-save.by-linux-kernel" errormsg = "service marix topoS_generated differs" expected_result = ([ ("10.8.8.1","10.8.8.1") diff --git a/haskell_tool/test/Suites/ParserHelper.hs b/haskell_tool/test/Suites/ParserHelper.hs index 4a735bff..a223da9a 100644 --- a/haskell_tool/test/Suites/ParserHelper.hs +++ b/haskell_tool/test/Suites/ParserHelper.hs @@ -13,7 +13,7 @@ import Test.Tasty.HUnit as HU -test_parse_show_identiy parser str = HU.testCase ("ParserHelper: parse show identity ("++str++")") $ do +test_parse_show_identity parser str = HU.testCase ("ParserHelper: parse show identity ("++str++")") $ do let result = case runParser eofParser () "" str of Left err -> do error $ show err Right result -> show result @@ -36,21 +36,21 @@ test_parse_quotedstring str = HU.testCase ("ParserHelper: quoted string (`"++str --TODO negative tests! 
tests = testGroup "ParserHelper Unit tests" $ - [ test_parse_show_identiy Network.IPTables.ParserHelper.ipv4addr "192.168.0.1" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv4cidr "192.168.0.1/24" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv4range "192.168.0.1-192.168.0.255" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv6addr "::" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv6cidr "::/64" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv6range "::-::" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv6addr "2001:db8::8:800:200c:417a" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv6addr "ff01::101" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv6addr "::8:800:200c:417a" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv6addr "2001:db8::" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv6addr "ff00::" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv6addr "fe80::" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv6addr "::" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv6addr "::1" - , test_parse_show_identiy Network.IPTables.ParserHelper.ipv6addr "2001:db8::1" + [ test_parse_show_identity Network.IPTables.ParserHelper.ipv4addr "192.168.0.1" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv4cidr "192.168.0.1/24" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv4range "192.168.0.1-192.168.0.255" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv6addr "::" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv6cidr "::/64" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv6range "::-::" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv6addr "2001:db8::8:800:200c:417a" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv6addr "ff01::101" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv6addr "::8:800:200c:417a" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv6addr "2001:db8::" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv6addr "ff00::" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv6addr "fe80::" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv6addr "::" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv6addr "::1" + , test_parse_show_identity Network.IPTables.ParserHelper.ipv6addr "2001:db8::1" , test_parse_quotedstring "\"foo\"" , test_parse_quotedstring "\"foo with some \\\" escaped quote \"" , test_parse_quotedstring "\"foo with some escaped escape (\\\\) and proper ending quote \\\\\"" diff --git a/thy/Automatic_Refinement/Autoref_Bindings_HOL.thy b/thy/Automatic_Refinement/Autoref_Bindings_HOL.thy index 2ff6528e..9917fcdb 100644 --- a/thy/Automatic_Refinement/Autoref_Bindings_HOL.thy +++ b/thy/Automatic_Refinement/Autoref_Bindings_HOL.thy @@ -611,7 +611,7 @@ subsection "Examples" text {* Be careful to make the concrete type a schematic type variable. The default behaviour of @{text "schematic_lemma"} makes it a fixed variable, - that will not unify with the infered term! *} + that will not unify with the inferred term! 
*} schematic_goal "(?f::?'c,[1,2,3]@[4::nat])\?R" by autoref diff --git a/thy/Automatic_Refinement/Lib/Foldi.thy b/thy/Automatic_Refinement/Lib/Foldi.thy index 76b7b546..13bf7d7a 100644 --- a/thy/Automatic_Refinement/Lib/Foldi.thy +++ b/thy/Automatic_Refinement/Lib/Foldi.thy @@ -1,7 +1,7 @@ -(* Title: Interuptible Fold +(* Title: Interruptible Fold Author: Thomas Tuerk *) -section {* Interuptible Fold *} +section {* Interruptible Fold *} theory Foldi imports Main begin diff --git a/thy/Automatic_Refinement/Lib/Misc.thy b/thy/Automatic_Refinement/Lib/Misc.thy index 550dd323..832f46bf 100644 --- a/thy/Automatic_Refinement/Lib/Misc.thy +++ b/thy/Automatic_Refinement/Lib/Misc.thy @@ -6,7 +6,7 @@ (* CHANGELOG: - 2010-05-09: Removed AC, AI locales, they are superseeded by concepts + 2010-05-09: Removed AC, AI locales, they are superseded by concepts from OrderedGroups 2010-09-22: Merges with ext/Aux diff --git a/thy/Automatic_Refinement/Lib/Refine_Util.thy b/thy/Automatic_Refinement/Lib/Refine_Util.thy index dccad1de..b080e9f7 100644 --- a/thy/Automatic_Refinement/Lib/Refine_Util.thy +++ b/thy/Automatic_Refinement/Lib/Refine_Util.thy @@ -637,7 +637,7 @@ ML {* "resolve with premises" #> Method.setup @{binding elim_all} (Attrib.thms >> (fn thms => fn ctxt => SIMPLE_METHOD (elim_all_tac ctxt thms))) - "repeteadly apply elimination rules to all subgoals" + "repeatedly apply elimination rules to all subgoals" #> Method.setup @{binding subst_tac} eqsubst_inst_meth "single-step substitution (dynamic instantiation)" #> Method.setup @{binding clarsimp_all} ( diff --git a/thy/Automatic_Refinement/Parametricity/Param_Tool.thy b/thy/Automatic_Refinement/Parametricity/Param_Tool.thy index cb3c8a1a..1ffe5678 100644 --- a/thy/Automatic_Refinement/Parametricity/Param_Tool.thy +++ b/thy/Automatic_Refinement/Parametricity/Param_Tool.thy @@ -305,7 +305,7 @@ begin subsection \Convenience Tools\ ML {* - (* Prefix p_ or wrong type supresses generation of relAPP *) + (* Prefix p_ or wrong type suppresses generation of relAPP *) fun cnv_relAPP t = let fun consider (Var ((name,_),T)) = diff --git a/thy/Automatic_Refinement/Parametricity/Relators.thy b/thy/Automatic_Refinement/Parametricity/Relators.thy index 924933f7..e66baa55 100644 --- a/thy/Automatic_Refinement/Parametricity/Relators.thy +++ b/thy/Automatic_Refinement/Parametricity/Relators.thy @@ -758,7 +758,7 @@ lemma sv_add_invar: -subsection {* Miscellanneous *} +subsection {* Miscellaneous *} lemma rel_cong: "(f,g)\Id \ (x,y)\Id \ (f x, g y)\Id" by simp lemma rel_fun_cong: "(f,g)\Id \ (f x, g x)\Id" by simp lemma rel_arg_cong: "(x,y)\Id \ (f x, f y)\Id" by simp diff --git a/thy/Automatic_Refinement/Tool/Autoref_Fix_Rel.thy b/thy/Automatic_Refinement/Tool/Autoref_Fix_Rel.thy index 8f769240..0505d11a 100644 --- a/thy/Automatic_Refinement/Tool/Autoref_Fix_Rel.thy +++ b/thy/Automatic_Refinement/Tool/Autoref_Fix_Rel.thy @@ -10,7 +10,7 @@ text {* Priority tags are used to influence the ordering of refinement theorems. A priority tag defines two numeric priorities, a major and a minor priority. The major priority is considered first, the minor priority last, i.e., after - the homogenity and relator-priority criteria. + the homogeneity and relator-priority criteria. The default value for both priorities is 0. 
*} definition PRIO_TAG :: "int \ int \ bool" @@ -459,12 +459,12 @@ ML {* structure hom_rules = Named_Sorted_Thms ( val name = @{binding autoref_hom} - val description = "Autoref: Homogenity rules" + val description = "Autoref: Homogeneity rules" val sort = K I val transform = K ( fn thm => case Thm.concl_of thm of @{mpat "Trueprop (CONSTRAINT _ _)"} => [thm] - | _ => raise THM ("Invalid homogenity rule",~1,[thm]) + | _ => raise THM ("Invalid homogeneity rule",~1,[thm]) ) ) diff --git a/thy/Automatic_Refinement/Tool/Autoref_Gen_Algo.thy b/thy/Automatic_Refinement/Tool/Autoref_Gen_Algo.thy index 81714d7b..09cb3179 100644 --- a/thy/Automatic_Refinement/Tool/Autoref_Gen_Algo.thy +++ b/thy/Automatic_Refinement/Tool/Autoref_Gen_Algo.thy @@ -4,7 +4,7 @@ imports Autoref_Translate begin (* TODO/FIXME: The priority ordering is not yet suited for generic - algorithms! If a refinement rule is generic, the homogenity and relator + algorithms! If a refinement rule is generic, the homogeneity and relator measures make no sense, as they are applied to schematic variables. However, currently generic algorithms seem to have lower priority than specific ones, so we can probably live with this problem for a while. diff --git a/thy/Automatic_Refinement/Tool/Autoref_Tool.thy b/thy/Automatic_Refinement/Tool/Autoref_Tool.thy index b220c793..c724450a 100644 --- a/thy/Automatic_Refinement/Tool/Autoref_Tool.thy +++ b/thy/Automatic_Refinement/Tool/Autoref_Tool.thy @@ -107,13 +107,13 @@ method_setup autoref_trans_step = {* Scan.succeed (fn ctxt => SIMPLE_METHOD' ( Autoref_Translate.trans_dbg_step_tac (Autoref_Phases.init_data ctxt) )) - *} "Single translation step, leaving unsolved side-coditions" + *} "Single translation step, leaving unsolved side-conditions" method_setup autoref_trans_step_only = {* Scan.succeed (fn ctxt => SIMPLE_METHOD' ( Autoref_Translate.trans_step_only_tac (Autoref_Phases.init_data ctxt) )) - *} "Single translation step, not attempting to solve side-coditions" + *} "Single translation step, not attempting to solve side-conditions" method_setup autoref_side = {* Scan.succeed (fn ctxt => SIMPLE_METHOD' ( diff --git a/thy/Automatic_Refinement/Tool/Autoref_Translate.thy b/thy/Automatic_Refinement/Tool/Autoref_Translate.thy index 04374bca..57567601 100644 --- a/thy/Automatic_Refinement/Tool/Autoref_Translate.thy +++ b/thy/Automatic_Refinement/Tool/Autoref_Translate.thy @@ -269,7 +269,7 @@ ML {* val msg = case concl of @{mpat "Trueprop (DEFER_tag _)"} - => Pretty.str "Could not solve defered side condition" + => Pretty.str "Could not solve deferred side condition" | @{mpat "Trueprop ((_,_)\_)"} => Pretty.str "Could not refine subterm" | _ => Pretty.str "Internal: Unexpected goal" diff --git a/thy/IP_Addresses/IP_Address.thy b/thy/IP_Addresses/IP_Address.thy index 9bd21a89..357daff8 100644 --- a/thy/IP_Addresses/IP_Address.thy +++ b/thy/IP_Addresses/IP_Address.thy @@ -7,7 +7,7 @@ imports Word_More WordInterval begin -section \Modelling IP Adresses\ +section \Modelling IP Addresses\ text\An IP address is basically an unsigned integer. We model IP addresses of arbitrary lengths. 
@@ -110,7 +110,7 @@ subsection\Sets of IP Addresses\ by(simp add: ipset_from_cidr_alt bitmagic_zeroLast_leq_or1Last) text{*Though we can write 192.168.1.2/24, we say that 192.168.0.0/24 is well-formed.*} - lemma ipset_from_cidr_base_wellforemd: fixes base:: "'i::len word" + lemma ipset_from_cidr_base_wellformed: fixes base:: "'i::len word" assumes "mask (len_of TYPE('i) - l) AND base = 0" shows "ipset_from_cidr base l = {base .. base OR mask (len_of TYPE('i) - l)}" proof - @@ -135,7 +135,7 @@ subsection\Sets of IP Addresses\ proof - have obviously: "mask (len_of TYPE('i) - n) = 0" by (simp add: assms) show ?thesis - apply(subst ipset_from_cidr_base_wellforemd) + apply(subst ipset_from_cidr_base_wellformed) subgoal using assms by simp by (simp add: obviously) qed @@ -172,7 +172,7 @@ subsection\Sets of IP Addresses\ apply(subst ipset_from_cidr_alt2[symmetric]) apply(subst zero_base_lsb_imp_set_eq_as_bit_operation) apply(simp; fail) - apply(subst ipset_from_cidr_base_wellforemd) + apply(subst ipset_from_cidr_base_wellformed) apply(simp; fail) apply(simp) done diff --git a/thy/IP_Addresses/IPv4.thy b/thy/IP_Addresses/IPv4.thy index db3875e1..6fd4c244 100644 --- a/thy/IP_Addresses/IPv4.thy +++ b/thy/IP_Addresses/IPv4.thy @@ -8,17 +8,17 @@ imports IP_Address begin -section \IPv4 Adresses\ +section \IPv4 Addresses\ text\An IPv4 address is basically a 32 bit unsigned integer.\ type_synonym ipv4addr = "32 word" - text\Conversion between natural numbers and IPv4 adresses\ + text\Conversion between natural numbers and IPv4 addresses\ definition nat_of_ipv4addr :: "ipv4addr \ nat" where "nat_of_ipv4addr a = unat a" definition ipv4addr_of_nat :: "nat \ ipv4addr" where "ipv4addr_of_nat n = of_nat n" - text\The maximum IPv4 addres\ + text\The maximum IPv4 address\ definition max_ipv4_addr :: "ipv4addr" where "max_ipv4_addr \ ipv4addr_of_nat ((2^32) - 1)" @@ -42,7 +42,7 @@ section \IPv4 Adresses\ lemma ipv4addr_of_nat_nat_of_ipv4addr: "ipv4addr_of_nat (nat_of_ipv4addr addr) = addr" by(simp add: ipv4addr_of_nat_def nat_of_ipv4addr_def) -subsection\Representing IPv4 Adresses (Syntax)\ +subsection\Representing IPv4 Addresses (Syntax)\ fun ipv4addr_of_dotdecimal :: "nat \ nat \ nat \ nat \ ipv4addr" where "ipv4addr_of_dotdecimal (a,b,c,d) = ipv4addr_of_nat (d + 256 * c + 65536 * b + 16777216 * a )" diff --git a/thy/IP_Addresses/IPv6.thy b/thy/IP_Addresses/IPv6.thy index 5189e4fe..2c17b7fa 100644 --- a/thy/IP_Addresses/IPv6.thy +++ b/thy/IP_Addresses/IPv6.thy @@ -12,7 +12,7 @@ section \IPv6 Addresses\ text\An IPv6 address is basically a 128 bit unsigned integer. 
RFC 4291, Section 2.\ type_synonym ipv6addr = "128 word" - text\Conversion between natural numbers and IPv6 adresses\ + text\Conversion between natural numbers and IPv6 addresses\ definition nat_of_ipv6addr :: "ipv6addr \ nat" where "nat_of_ipv6addr a = unat a" definition ipv6addr_of_nat :: "nat \ ipv6addr" where @@ -45,7 +45,7 @@ section \IPv6 Addresses\ lemma ipv6addr_of_nat_nat_of_ipv6addr: "ipv6addr_of_nat (nat_of_ipv6addr addr) = addr" by(simp add: ipv6addr_of_nat_def nat_of_ipv6addr_def) -subsection\Syntax of IPv6 Adresses\ +subsection\Syntax of IPv6 Addresses\ text\RFC 4291, Section 2.2.: Text Representation of Addresses\ text\Quoting the RFC (note: errata exists):\ @@ -210,7 +210,7 @@ subsection\Syntax of IPv6 Adresses\ | [Some a, Some b, Some c, Some d, Some e, Some f, None, Some g] \ Some (IPv6AddrCompressed7_7 a b c d e f () g) | [Some a, Some b, Some c, Some d, Some e, Some f, Some g, None] \ Some (IPv6AddrCompressed8_7 a b c d e f g ()) - | _ \ None (*invalid ipv6 copressed address.*) + | _ \ None (*invalid ipv6 compressed address.*) )" fun ipv6addr_syntax_compressed_to_list :: "ipv6addr_syntax_compressed \ ((16 word) option) list" @@ -479,7 +479,7 @@ subsection\Semantics\ have ucast_ipv6_piece_rule: "length (dropWhile Not (to_bl w)) \ 16 \ (ucast::16 word \ 128 word) ((ucast::128 word \ 16 word) w) = w" for w::ipv6addr - by(rule ucast_short_ucast_long_ingoreLeadingZero) (simp_all) + by(rule ucast_short_ucast_long_ignoreLeadingZero) (simp_all) have ucast_ipv6_piece: "16 \ 128 - n \ (ucast::16 word \ 128 word) ((ucast::128 word \ 16 word) (w AND (mask 16 << n) >> n)) << n = w AND (mask 16 << n)" for w::ipv6addr and n::nat @@ -510,7 +510,7 @@ subsection\Semantics\ have ucast16_ucast128_masks_highest_bits0: "(ucast ((ucast::ipv6addr \ 16 word) (ip AND 0xFFFF))) = ip AND 0xFFFF" apply(subst word128_masks_ipv6pieces)+ - apply(subst ucast_short_ucast_long_ingoreLeadingZero) + apply(subst ucast_short_ucast_long_ignoreLeadingZero) apply simp_all by (simp add: length_drop_mask) @@ -635,14 +635,14 @@ definition ipv6_unparsed_compressed_to_preferred :: "((16 word) option) list \ ipv6addr_c2p ipv6compressed = ipv6prferred" + "ipv6_unparsed_compressed_to_preferred (ipv6addr_syntax_compressed_to_list ipv6compressed) = Some ipv6preferred + \ ipv6addr_c2p ipv6compressed = ipv6preferred" by(cases ipv6compressed) (simp_all add: ipv6_unparsed_compressed_to_preferred_def) (*1s*) lemma ipv6_unparsed_compressed_to_preferred_identity2: - "ipv6_unparsed_compressed_to_preferred ls = Some ipv6prferred + "ipv6_unparsed_compressed_to_preferred ls = Some ipv6preferred \ (\ipv6compressed. 
parse_ipv6_address_compressed ls = Some ipv6compressed \ - ipv6addr_c2p ipv6compressed = ipv6prferred)" + ipv6addr_c2p ipv6compressed = ipv6preferred)" apply(rule iffI) apply(subgoal_tac "parse_ipv6_address_compressed ls \ None") prefer 2 diff --git a/thy/IP_Addresses/Prefix_Match.thy b/thy/IP_Addresses/Prefix_Match.thy index c243d845..8d2cffa5 100644 --- a/thy/IP_Addresses/Prefix_Match.thy +++ b/thy/IP_Addresses/Prefix_Match.thy @@ -153,7 +153,7 @@ subsection\Equivalence Proofs\ unfolding prefix_match_semantics_wordset[OF assms] unfolding valid_prefix_ipset_from_netmask_ipset_from_cidr unfolding prefix_to_wordset_def - apply(subst ipset_from_cidr_base_wellforemd) + apply(subst ipset_from_cidr_base_wellformed) subgoal using assms by(simp add: valid_prefix_def pfxm_mask_def) by(simp add: pfxm_mask_def) diff --git a/thy/IP_Addresses/WordInterval.thy b/thy/IP_Addresses/WordInterval.thy index 84ad46ed..8916dfa8 100644 --- a/thy/IP_Addresses/WordInterval.thy +++ b/thy/IP_Addresses/WordInterval.thy @@ -595,7 +595,7 @@ lemma wordinterval_to_set_alt: "wordinterval_to_set r = {x. wordinterval_element lemma wordinterval_un_empty: "wordinterval_empty r1 \ wordinterval_eq (wordinterval_union r1 r2) r2" by(subst wordinterval_eq_set_eq, simp) -lemma wordinterval_un_emty_b: +lemma wordinterval_un_empty_b: "wordinterval_empty r2 \ wordinterval_eq (wordinterval_union r1 r2) r1" by(subst wordinterval_eq_set_eq, simp) diff --git a/thy/IP_Addresses/WordInterval_Sorted.thy b/thy/IP_Addresses/WordInterval_Sorted.thy index 6efccc35..ab24aa49 100644 --- a/thy/IP_Addresses/WordInterval_Sorted.thy +++ b/thy/IP_Addresses/WordInterval_Sorted.thy @@ -1,6 +1,6 @@ theory WordInterval_Sorted imports WordInterval - "../Automatic_Refinement/Lib/Misc" (*Mergesort. TODO: dependnecy! we need a mergesort afp entry!!*) + "../Automatic_Refinement/Lib/Misc" (*Mergesort. TODO: dependency! we need a mergesort afp entry!!*) "~~/src/HOL/Library/Product_Lexorder" begin diff --git a/thy/Iptables_Semantics/Common/Negation_Type_DNF.thy b/thy/Iptables_Semantics/Common/Negation_Type_DNF.thy index 3cb34b19..71886bda 100644 --- a/thy/Iptables_Semantics/Common/Negation_Type_DNF.thy +++ b/thy/Iptables_Semantics/Common/Negation_Type_DNF.thy @@ -118,7 +118,7 @@ subsubsection\inverting a DNF\ apply(simp add: map_a_and cnf_to_bool_append dnf_to_bool_append) by blast - lemma cnf_invert_singelton: "cnf_to_bool \ [invert a] \ \ cnf_to_bool \ [a]" by(cases a, simp_all) + lemma cnf_invert_singleton: "cnf_to_bool \ [invert a] \ \ cnf_to_bool \ [a]" by(cases a, simp_all) lemma cnf_singleton_false: "(\a'\set as. \ cnf_to_bool \ [a']) \ \ cnf_to_bool \ as" by(induction \ as rule: cnf_to_bool.induct) (simp_all) @@ -131,7 +131,7 @@ subsubsection\inverting a DNF\ apply(induction d) apply(simp_all) apply(simp add: listprepend_correct) - apply(simp add: cnf_invert_singelton cnf_singleton_false) + apply(simp add: cnf_invert_singleton cnf_singleton_false) done subsubsection\Optimizing\ diff --git a/thy/Iptables_Semantics/Datatype_Selectors.thy b/thy/Iptables_Semantics/Datatype_Selectors.thy index 6f3384e7..70aaaeab 100644 --- a/thy/Iptables_Semantics/Datatype_Selectors.thy +++ b/thy/Iptables_Semantics/Datatype_Selectors.thy @@ -22,7 +22,7 @@ fun wf_disc_sel :: "(('a \ bool) \ ('a \ 'b)) \ (\a. disc (C a)) \ (\a. 
disc (C a))" *) diff --git a/thy/Iptables_Semantics/Examples/TUM_Net_Firewall/README_cheating b/thy/Iptables_Semantics/Examples/TUM_Net_Firewall/README_cheating index 46fd84d2..15c93ed6 100644 --- a/thy/Iptables_Semantics/Examples/TUM_Net_Firewall/README_cheating +++ b/thy/Iptables_Semantics/Examples/TUM_Net_Firewall/README_cheating @@ -17,8 +17,8 @@ diff -u iptables_20.11.2013 iptables_20.11.2013_cheating The rule was simply dead. -We suffix everything which is based on this modified rulset (with those three rules removed) '_cheating'. -In earlier versions, we also remove the ESTABLISHED;REALTED rule. Details why this is (was) reasonable can be found in the paper: +We suffix everything which is based on this modified ruleset (with those three rules removed) '_cheating'. +In earlier versions, we also remove the ESTABLISHED;RELATED rule. Details why this is (was) reasonable can be found in the paper: http://www.net.in.tum.de/fileadmin/bibtex/publications/papers/fm15_Semantics-Preserving_Simplification_of_Real-World_Firewall_Rule_Sets.pdf diff --git a/thy/Iptables_Semantics/Examples/TUM_Net_Firewall/TUM_Simple_FW.thy b/thy/Iptables_Semantics/Examples/TUM_Net_Firewall/TUM_Simple_FW.thy index 5c4e5cc0..905dda65 100644 --- a/thy/Iptables_Semantics/Examples/TUM_Net_Firewall/TUM_Simple_FW.thy +++ b/thy/Iptables_Semantics/Examples/TUM_Net_Firewall/TUM_Simple_FW.thy @@ -6,7 +6,7 @@ begin section\Printing various version of the simplified firewall\ - text\We got lucky that we do not need to abstract over the primitives whcih are not supported by the simple firewall\ + text\We got lucky that we do not need to abstract over the primitives which are not supported by the simple firewall\ value[code] "let x = to_simple_firewall (upper_closure (packet_assume_new (unfold_ruleset_FORWARD net_fw_3_FORWARD_default_policy (map_of net_fw_3)))) diff --git a/thy/Iptables_Semantics/Examples/medium-sized-company/Analyze_medium_sized_company.thy b/thy/Iptables_Semantics/Examples/medium-sized-company/Analyze_medium_sized_company.thy index 57806bf0..c26c9a0a 100644 --- a/thy/Iptables_Semantics/Examples/medium-sized-company/Analyze_medium_sized_company.thy +++ b/thy/Iptables_Semantics/Examples/medium-sized-company/Analyze_medium_sized_company.thy @@ -68,7 +68,7 @@ lemma "simple_fw_valid (to_simple_firewall (lower_closure (optimize_matches abstract_for_simple_firewall (lower_closure (packet_assume_new unfolded_FORWARD)))))" by eval (* -text{*If we call the IP address spcae partitioning incorrectly (not prepocessed, still has interfaces), we get an error*} +text{*If we call the IP address space partitioning incorrectly (not preprocessed, still has interfaces), we get an error*} value[code] " parts_connection_ssh (to_simple_firewall (upper_closure (optimize_matches abstract_for_simple_firewall (upper_closure (packet_assume_new unfolded_FORWARD)))))" *) diff --git a/thy/Iptables_Semantics/Examples/sns.ias.edu/test.sh b/thy/Iptables_Semantics/Examples/sns.ias.edu/test.sh index 57340092..ae07d299 100755 --- a/thy/Iptables_Semantics/Examples/sns.ias.edu/test.sh +++ b/thy/Iptables_Semantics/Examples/sns.ias.edu/test.sh @@ -12,7 +12,7 @@ modprobe ip_conntrack modprobe ip_conntrack_ftp # These lines are here in case rules are already in place and the script is ever rerun on the fly. -# We want to remove all rules and pre-exisiting user defined chains and zero the counters +# We want to remove all rules and pre-existing user defined chains and zero the counters # before we implement new rules. 
iptables -F iptables -X diff --git a/thy/Iptables_Semantics/Examples/topoS_generated/Analyze_topos_generated.thy b/thy/Iptables_Semantics/Examples/topoS_generated/Analyze_topos_generated.thy index 2e2414e3..e05344b8 100644 --- a/thy/Iptables_Semantics/Examples/topoS_generated/Analyze_topos_generated.thy +++ b/thy/Iptables_Semantics/Examples/topoS_generated/Analyze_topos_generated.thy @@ -5,7 +5,7 @@ begin -parse_iptables_save factory_fw="imaginray_factory_network.iptables-save.by-linux-kernel" +parse_iptables_save factory_fw="imaginary_factory_network.iptables-save.by-linux-kernel" thm factory_fw_def thm factory_fw_FORWARD_default_policy_def diff --git a/thy/Iptables_Semantics/Examples/topoS_generated/imaginray_factory_network.iptables-save b/thy/Iptables_Semantics/Examples/topoS_generated/imaginary_factory_network.iptables-save similarity index 100% rename from thy/Iptables_Semantics/Examples/topoS_generated/imaginray_factory_network.iptables-save rename to thy/Iptables_Semantics/Examples/topoS_generated/imaginary_factory_network.iptables-save diff --git a/thy/Iptables_Semantics/Examples/topoS_generated/imaginray_factory_network.iptables-save.by-linux-kernel b/thy/Iptables_Semantics/Examples/topoS_generated/imaginary_factory_network.iptables-save.by-linux-kernel similarity index 100% rename from thy/Iptables_Semantics/Examples/topoS_generated/imaginray_factory_network.iptables-save.by-linux-kernel rename to thy/Iptables_Semantics/Examples/topoS_generated/imaginary_factory_network.iptables-save.by-linux-kernel diff --git a/thy/Iptables_Semantics/Firewall_Common.thy b/thy/Iptables_Semantics/Firewall_Common.thy index 536093a3..3a7a7d56 100644 --- a/thy/Iptables_Semantics/Firewall_Common.thy +++ b/thy/Iptables_Semantics/Firewall_Common.thy @@ -69,7 +69,7 @@ fun opt_MatchAny_match_expr_once :: "'a match_expr \ 'a match_expr" "opt_MatchAny_match_expr_once (MatchAnd _ (MatchNot MatchAny)) = (MatchNot MatchAny)" | "opt_MatchAny_match_expr_once (MatchAnd (MatchNot MatchAny) _) = (MatchNot MatchAny)" | "opt_MatchAny_match_expr_once (MatchAnd m1 m2) = MatchAnd (opt_MatchAny_match_expr_once m1) (opt_MatchAny_match_expr_once m2)" -(* without recursive call: need to apply multiple times until it stabelizes *) +(* without recursive call: need to apply multiple times until it stabilizes *) text\It is still a good idea to apply @{const opt_MatchAny_match_expr_once} multiple times. 
Example:\ @@ -130,7 +130,7 @@ text\Structural properties about match expressions\ "matcheq_matchNone (MatchNot (MatchAnd m1 m2)) \ matcheq_matchNone (MatchNot m1) \ matcheq_matchNone (MatchNot m2)" | "matcheq_matchNone (MatchAnd m1 m2) \ matcheq_matchNone m1 \ matcheq_matchNone m2" - lemma matachAny_matchNone: "\ has_primitive m \ matcheq_matchAny m \ \ matcheq_matchNone m" + lemma matchAny_matchNone: "\ has_primitive m \ matcheq_matchAny m \ \ matcheq_matchNone m" by(induction m rule: matcheq_matchNone.induct)(simp_all) lemma matcheq_matchNone_no_primitive: "\ has_primitive m \ matcheq_matchNone (MatchNot m) \ \ matcheq_matchNone m" diff --git a/thy/Iptables_Semantics/Matching.thy b/thy/Iptables_Semantics/Matching.thy index 480cd61d..9617b650 100644 --- a/thy/Iptables_Semantics/Matching.thy +++ b/thy/Iptables_Semantics/Matching.thy @@ -23,7 +23,7 @@ lemma matcheq_matchAny: "\ has_primitive m \ matcheq_matchA by(induction m) simp_all lemma matcheq_matchNone: "\ has_primitive m \ matcheq_matchNone m \ \ matches \ m p" - by(auto dest: matcheq_matchAny matachAny_matchNone) + by(auto dest: matcheq_matchAny matchAny_matchNone) lemma matcheq_matchNone_not_matches: "matcheq_matchNone m \ \ matches \ m p" by(induction m rule: matcheq_matchNone.induct) auto diff --git a/thy/Iptables_Semantics/No_Spoof_Embeddings.thy b/thy/Iptables_Semantics/No_Spoof_Embeddings.thy index 5dada3eb..87cd9d9a 100644 --- a/thy/Iptables_Semantics/No_Spoof_Embeddings.thy +++ b/thy/Iptables_Semantics/No_Spoof_Embeddings.thy @@ -8,7 +8,7 @@ section\Spoofing protection in Ternary Semantics implies Spoofing protecti text\If @{const no_spoofing} is shown in the ternary semantics, it implies that no spoofing is possible in the Boolean semantics with magic oracle. We only assume that the oracle agrees with the @{const common_matcher} on the not-unknown parts.\ - lemma approximating_imp_booloan_semantics_nospoofing: + lemma approximating_imp_boolean_semantics_nospoofing: assumes "matcher_agree_on_exact_matches \ common_matcher" and "simple_ruleset rs" and no_spoofing: "no_spoofing TYPE('pkt_ext) ipassmt rs" @@ -38,7 +38,7 @@ text\If @{const no_spoofing} is shown in the ternary semantics, it implies and no_spoofing: "no_spoofing TYPE('pkt_ext) ipassmt rs" and "iface \ dom ipassmt" shows "{p_src p | p :: ('i::len,'pkt_ext) tagged_packet_scheme. (\,\,p\p_iiface:=iface_sel iface\\ \rs, Undecided\ \ Decision FinalAllow)} \ ipcidr_union_set (set (the (ipassmt iface)))" - using approximating_imp_booloan_semantics_nospoofing[OF assms(1) assms(2) assms(3), where \=\] + using approximating_imp_boolean_semantics_nospoofing[OF assms(1) assms(2) assms(3), where \=\] using assms(4) by blast @@ -55,7 +55,7 @@ text\If @{const no_spoofing} is shown in the ternary semantics, it implies { assume no_spoofing: "no_spoofing TYPE('pkt_ext) ipassmt rs" have "{p_src p | p :: ('i,'pkt_ext) tagged_packet_scheme. 
(\,\,p\p_iiface:=iface_sel iface\\ \rs, Undecided\ \ Decision FinalAllow)} \ ipcidr_union_set (set (the (ipassmt iface)))" - using approximating_imp_booloan_semantics_nospoofing[OF assms(1) assms(2) no_spoofing, where \=\] + using approximating_imp_boolean_semantics_nospoofing[OF assms(1) assms(2) no_spoofing, where \=\] using assms(5) by blast } with no_spoofing_iface[OF assms(2) assms(3) no_spoofing_executable] show ?thesis by blast diff --git a/thy/Iptables_Semantics/Primitive_Matchers/Code_Interface.thy b/thy/Iptables_Semantics/Primitive_Matchers/Code_Interface.thy index 922e1bb3..9e67d6f4 100644 --- a/thy/Iptables_Semantics/Primitive_Matchers/Code_Interface.thy +++ b/thy/Iptables_Semantics/Primitive_Matchers/Code_Interface.thy @@ -106,7 +106,7 @@ begin IIface (Iface ''foobar''), Extra ''--name DEFAULT --rsource'']" by eval - private lemma eval_ternary_And_Unknown_Unkown: + private lemma eval_ternary_And_Unknown_Unknown: "eval_ternary_And TernaryUnknown (eval_ternary_And TernaryUnknown tv) = eval_ternary_And TernaryUnknown tv" by(cases tv) (simp_all) @@ -127,7 +127,7 @@ begin case (2 a1 a2) thus ?case apply(simp add: is_pos_Extra_alist_and) apply(cases a1) - apply(simp_all add: eval_ternary_And_Unknown_Unkown) + apply(simp_all add: eval_ternary_And_Unknown_Unknown) done next case 3 thus ?case by(simp) @@ -197,7 +197,7 @@ end -(*currently unused and unverifed. may be needed for future use*) +(*currently unused and unverified. may be needed for future use*) definition prefix_to_strange_inverse_cisco_mask:: "nat \ (nat \ nat \ nat \ nat)" where "prefix_to_strange_inverse_cisco_mask n \ dotdecimal_of_ipv4addr ( (NOT (((mask n)::ipv4addr) << (32 - n))) )" lemma "prefix_to_strange_inverse_cisco_mask 8 = (0, 255, 255, 255)" by eval diff --git a/thy/Iptables_Semantics/Primitive_Matchers/Common_Primitive_Matcher_Generic.thy b/thy/Iptables_Semantics/Primitive_Matchers/Common_Primitive_Matcher_Generic.thy index 2a7ab1bf..5efe08b2 100644 --- a/thy/Iptables_Semantics/Primitive_Matchers/Common_Primitive_Matcher_Generic.thy +++ b/thy/Iptables_Semantics/Primitive_Matchers/Common_Primitive_Matcher_Generic.thy @@ -68,7 +68,7 @@ begin by(auto simp add: Ports_single_not Ports_single Prot_single_not Prot_single) - lemma multiports_disjuction: + lemma multiports_disjunction: "(\rg\set spts. matches (\, \) (Match (Src_Ports (L4Ports proto [rg]))) a p) \ matches (\, \) (Match (Src_Ports (L4Ports proto spts))) a p" "(\rg\set dpts. 
matches (\, \) (Match (Dst_Ports (L4Ports proto [rg]))) a p) \ matches (\, \) (Match (Dst_Ports (L4Ports proto dpts))) a p" by(auto simp add: Src_Ports Dst_Ports match_raw_ternary bool_to_ternary_simps ports_to_set diff --git a/thy/Iptables_Semantics/Primitive_Matchers/Conntrack_State.thy b/thy/Iptables_Semantics/Primitive_Matchers/Conntrack_State.thy index 2245f485..1b273c5a 100644 --- a/thy/Iptables_Semantics/Primitive_Matchers/Conntrack_State.thy +++ b/thy/Iptables_Semantics/Primitive_Matchers/Conntrack_State.thy @@ -29,7 +29,7 @@ function ctstate_set_toString_list :: "ctstate set \ string list" wh if CT_New \ S then ''NEW''#ctstate_set_toString_list (S - {CT_New}) else if CT_Established \ S then ''ESTABLISHED''#ctstate_set_toString_list (S - {CT_Established}) else if CT_Related \ S then ''RELATED''#ctstate_set_toString_list (S - {CT_Related}) else - if CT_Untracked \ S then ''UNTRACKED''#ctstate_set_toString_list (S - {CT_Untracked}) else [''ERROR-unkown-ctstate''])" + if CT_Untracked \ S then ''UNTRACKED''#ctstate_set_toString_list (S - {CT_Untracked}) else [''ERROR-unknown-ctstate''])" by(pat_completeness) auto termination ctstate_set_toString_list diff --git a/thy/Iptables_Semantics/Primitive_Matchers/Interfaces_Normalize.thy b/thy/Iptables_Semantics/Primitive_Matchers/Interfaces_Normalize.thy index 7ff4d56a..d8349947 100644 --- a/thy/Iptables_Semantics/Primitive_Matchers/Interfaces_Normalize.thy +++ b/thy/Iptables_Semantics/Primitive_Matchers/Interfaces_Normalize.thy @@ -135,7 +135,7 @@ begin "normalized_n_primitive (disc, sel) P m \ (\a. \ disc (IIface a)) \ normalized_nnf_match m \ compress_normalize_input_interfaces m = Some m' \ normalized_nnf_match m' \ normalized_n_primitive (disc, sel) P m'" unfolding compress_normalize_input_interfaces_def - using compress_normalize_primitve_preserves_normalized_n_primitive[OF _ wf_disc_sel_common_primitive(5)] by blast + using compress_normalize_primitive_preserves_normalized_n_primitive[OF _ wf_disc_sel_common_primitive(5)] by blast @@ -208,7 +208,7 @@ begin "normalized_n_primitive (disc, sel) P m \ (\a. \ disc (OIface a)) \ normalized_nnf_match m \ compress_normalize_output_interfaces m = Some m' \ normalized_nnf_match m' \ normalized_n_primitive (disc, sel) P m'" unfolding compress_normalize_output_interfaces_def - using compress_normalize_primitve_preserves_normalized_n_primitive[OF _ wf_disc_sel_common_primitive(6)] by blast + using compress_normalize_primitive_preserves_normalized_n_primitive[OF _ wf_disc_sel_common_primitive(6)] by blast end diff --git a/thy/Iptables_Semantics/Primitive_Matchers/IpAddresses.thy b/thy/Iptables_Semantics/Primitive_Matchers/IpAddresses.thy index 47e82c49..ef94b60b 100644 --- a/thy/Iptables_Semantics/Primitive_Matchers/IpAddresses.thy +++ b/thy/Iptables_Semantics/Primitive_Matchers/IpAddresses.thy @@ -8,7 +8,7 @@ begin --"Misc" -(*we dont't have an empty ip space, but a space which only contains the 0 address. We will use the option type to denote the empty space in some functions.*) +(*we don't have an empty ip space, but a space which only contains the 0 address. 
We will use the option type to denote the empty space in some functions.*) lemma "ipset_from_cidr (ipv4addr_of_dotdecimal (0, 0, 0, 0)) 33 = {0}" by(simp add: ipv4addr_of_dotdecimal.simps ipv4addr_of_nat_def ipset_from_cidr_large_pfxlen) diff --git a/thy/Iptables_Semantics/Primitive_Matchers/Ipassmt.thy b/thy/Iptables_Semantics/Primitive_Matchers/Ipassmt.thy index 135cb28f..65f87e85 100644 --- a/thy/Iptables_Semantics/Primitive_Matchers/Ipassmt.thy +++ b/thy/Iptables_Semantics/Primitive_Matchers/Ipassmt.thy @@ -3,7 +3,7 @@ imports Common_Primitive_Syntax "../Semantics_Ternary/Primitive_Normalization" "../../Simple_Firewall/Primitives/Iface" "../../Simple_Firewall/Common/IP_Addr_WordInterval_toString" (*for debug pretty-printing*) - "../../Automatic_Refinement/Lib/Misc" (*dependnecy!*) + "../../Automatic_Refinement/Lib/Misc" (*dependency!*) begin text\A mapping from an interface to its assigned ip addresses in CIDR notation\ @@ -36,7 +36,7 @@ subsection\Sanity checking for an @{typ "'i ipassignment"}.\ distinct (map fst ipassmt) \ ipassmt_sanity_nowildcards (map_of ipassmt) then map_of ipassmt - else undefined (*undefined_ipassmt_must_be_distinct_and_dont_have_wildcard_interfaces*))" + else undefined (*undefined_ipassmt_must_be_distinct_and_not_have_wildcard_interfaces*))" text\some additional (optional) sanity checks\ @@ -365,8 +365,8 @@ subsection\IP Assignment difference\ [(d, (k a b d, k b a d)). d \ remdups (map fst (a @ b))]" - text\If an interface is defined in both ipassignments and there is no difference - then the two ipassignements describe the same IP range for this interface.\ + text\If an interface is defined in both ip assignments and there is no difference + then the two ip assignments describe the same IP range for this interface.\ lemma ipassmt_diff_ifce_equal: "(ifce, [], []) \ set (ipassmt_diff ipassmt1 ipassmt2) \ ifce \ dom (map_of ipassmt1) \ ifce \ dom (map_of ipassmt2) \ ipcidr_union_set (set (the ((map_of ipassmt1) ifce))) = diff --git a/thy/Iptables_Semantics/Primitive_Matchers/L4_Protocol_Flags.thy b/thy/Iptables_Semantics/Primitive_Matchers/L4_Protocol_Flags.thy index 3c12ac7e..a3df201f 100644 --- a/thy/Iptables_Semantics/Primitive_Matchers/L4_Protocol_Flags.thy +++ b/thy/Iptables_Semantics/Primitive_Matchers/L4_Protocol_Flags.thy @@ -75,7 +75,7 @@ section\Matching TCP Flags\ else (\ c1 \ fmask1) \ (\ c2 \ fmask2))" context begin - private lemma funny_set_falg_fmask_helper: "c2 \ fmask2 \ (c1 = c2 \ fmask1 = fmask2) = (\pkt. (pkt \ fmask1 = c1) = (pkt \ fmask2 = c2))" + private lemma funny_set_flag_fmask_helper: "c2 \ fmask2 \ (c1 = c2 \ fmask1 = fmask2) = (\pkt.
(pkt \ fmask1 = c1) = (pkt \ fmask2 = c2))" apply rule apply presburger apply(subgoal_tac "fmask1 = fmask2") @@ -127,7 +127,7 @@ section\Matching TCP Flags\ apply(cases f1, cases f2, simp) apply(rename_tac fmask1 c1 fmask2 c2) apply(intro conjI impI) - using funny_set_falg_fmask_helper apply metis + using funny_set_flag_fmask_helper apply metis apply blast done end diff --git a/thy/Iptables_Semantics/Primitive_Matchers/No_Spoof.thy b/thy/Iptables_Semantics/Primitive_Matchers/No_Spoof.thy index 607dd79b..318d6fa0 100644 --- a/thy/Iptables_Semantics/Primitive_Matchers/No_Spoof.thy +++ b/thy/Iptables_Semantics/Primitive_Matchers/No_Spoof.thy @@ -365,7 +365,7 @@ begin text\The following algorithm sound but not complete.\ - (*alowed: set ip ips potentially allowed for iface + (*allowed: set ip ips potentially allowed for iface denied: set of ips definitely dropped for iface*) private fun no_spoofing_algorithm :: "iface \ 'i::len ipassignment \ 'i common_primitive rule list \ 'i word set \ 'i word set \ bool" where @@ -692,7 +692,7 @@ text\Examples\ Rule MatchAny action.Drop]" apply(rule no_spoofing_iface) apply(simp_all add: simple_ruleset_def) (*simple and nnf*) - by eval (*executable spoofing alogorithm*) + by eval (*executable spoofing algorithm*) text\Example 2: @@ -717,7 +717,7 @@ text\Examples\ \ lemma "\ no_spoofing TYPE('pkt_ext) [Iface ''eth0'' \ [(ipv4addr_of_dotdecimal (192,168,0,0), 24)]] - [Rule (MatchNot (Match (IIface (Iface ''wlan+'')))) action.Accept, (*accidently allow everything for eth0*) + [Rule (MatchNot (Match (IIface (Iface ''wlan+'')))) action.Accept, (*accidentally allow everything for eth0*) Rule (MatchAnd (MatchNot (Match (Src (IpAddrNetmask (ipv4addr_of_dotdecimal (192,168,0,0)) 24)))) (Match (IIface (Iface ''eth0'')))) action.Drop, Rule MatchAny action.Accept] " @@ -739,7 +739,7 @@ text\Examples\ lemma "\ no_spoofing_iface (Iface ''eth0'') [Iface ''eth0'' \ [(ipv4addr_of_dotdecimal (192,168,0,0), 24)]] - [Rule (MatchNot (Match (IIface (Iface ''wlan+'')))) action.Accept, (*accidently allow everything for eth0*) + [Rule (MatchNot (Match (IIface (Iface ''wlan+'')))) action.Accept, (*accidentally allow everything for eth0*) Rule (MatchAnd (MatchNot (Match (Src (IpAddrNetmask (ipv4addr_of_dotdecimal (192,168,0,0)) 24)))) (Match (IIface (Iface ''eth0'')))) action.Drop, Rule MatchAny action.Accept] " by eval diff --git a/thy/Iptables_Semantics/Primitive_Matchers/Parser.thy b/thy/Iptables_Semantics/Primitive_Matchers/Parser.thy index c6a42ed0..b00e6098 100644 --- a/thy/Iptables_Semantics/Primitive_Matchers/Parser.thy +++ b/thy/Iptables_Semantics/Primitive_Matchers/Parser.thy @@ -447,7 +447,7 @@ local else r end; fun append table chain rule = case FirewallTable.lookup table chain - of NONE => raise Fail ("uninitialized cahin: "^chain) + of NONE => raise Fail ("uninitialized chain: "^chain) | SOME rules => FirewallTable.update (chain, rules@[rule]) table fun mk_Rule (tbl: firewall_table) (chain: string, target : (parsed_action_type * string) option, t : term) = diff --git a/thy/Iptables_Semantics/Primitive_Matchers/Parser6.thy b/thy/Iptables_Semantics/Primitive_Matchers/Parser6.thy index f8bb4f93..3b9f6dcc 100644 --- a/thy/Iptables_Semantics/Primitive_Matchers/Parser6.thy +++ b/thy/Iptables_Semantics/Primitive_Matchers/Parser6.thy @@ -451,7 +451,7 @@ local else r end; fun append table chain rule = case FirewallTable.lookup table chain - of NONE => raise Fail ("uninitialized cahin: "^chain) + of NONE => raise Fail ("uninitialized chain: "^chain) | SOME rules => 
FirewallTable.update (chain, rules@[rule]) table fun mk_Rule (tbl: firewall_table) (chain: string, target : (parsed_action_type * string) option, t : term) = diff --git a/thy/Iptables_Semantics/Primitive_Matchers/Ports_Normalize.thy b/thy/Iptables_Semantics/Primitive_Matchers/Ports_Normalize.thy index 1d875c5f..fdf3d72b 100644 --- a/thy/Iptables_Semantics/Primitive_Matchers/Ports_Normalize.thy +++ b/thy/Iptables_Semantics/Primitive_Matchers/Ports_Normalize.thy @@ -541,7 +541,7 @@ subsection\Normalizing Positive Matches on Ports\ shows "m' \ set (normalize_positive_ports_step (disc,sel) C m) \ normalized_nnf_match m'" apply(simp add: normalize_positive_ports_step_def) apply(elim exE conjE, rename_tac rst spts) - apply(drule sym) (*switch primitive_extrartor = *) + apply(drule sym) (*switch primitive_extractor = *) apply(frule primitive_extractor_correct(2)[OF n wf_disc_sel]) apply(subgoal_tac "getNeg spts = []") (*duplication above*) prefer 2 subgoal @@ -559,7 +559,7 @@ subsection\Normalizing Positive Matches on Ports\ apply(intro ballI, rename_tac m') apply(simp) apply(elim exE conjE, rename_tac rst spts) - apply(drule sym) (*switch primitive_extrartor = *) + apply(drule sym) (*switch primitive_extractor = *) apply(frule primitive_extractor_correct(2)[OF n wf_disc_sel]) apply(frule primitive_extractor_correct(3)[OF n wf_disc_sel]) apply(subgoal_tac "getNeg spts = []") (*duplication above*) diff --git a/thy/Iptables_Semantics/Primitive_Matchers/Protocols_Normalize.thy b/thy/Iptables_Semantics/Primitive_Matchers/Protocols_Normalize.thy index 6f2a56a2..acefa7bc 100644 --- a/thy/Iptables_Semantics/Primitive_Matchers/Protocols_Normalize.thy +++ b/thy/Iptables_Semantics/Primitive_Matchers/Protocols_Normalize.thy @@ -199,7 +199,7 @@ lemma "simple_proto_conjunct p1 (Proto p2) \ None \ \ (\a. \ disc (Prot a)) \ normalized_nnf_match m \ compress_normalize_protocols_step m = Some m' \ normalized_nnf_match m' \ normalized_n_primitive (disc, sel) P m'" unfolding compress_normalize_protocols_step_def - using compress_normalize_primitve_preserves_normalized_n_primitive[OF _ wf_disc_sel_common_primitive(7)] by blast + using compress_normalize_primitive_preserves_normalized_n_primitive[OF _ wf_disc_sel_common_primitive(7)] by blast lemma "case compress_normalize_protocols_step diff --git a/thy/Iptables_Semantics/ROOT b/thy/Iptables_Semantics/ROOT index 40a72222..f531ea6d 100644 --- a/thy/Iptables_Semantics/ROOT +++ b/thy/Iptables_Semantics/ROOT @@ -19,7 +19,7 @@ session "Bitmagic" = IP_Addresses + session "Iptables_Semantics" = Routing + theories[document=false] "~~/src/HOL/Library/LaTeXsugar" - (*The Native_Word things destory the latex document! Does not compile, ...*) + (*The Native_Word things destroy the latex document! 
Does not compile, ...*) "../Native_Word/More_Bits_Int" "../Native_Word/Code_Target_Bits_Int" "~~/src/HOL/Library/Code_Target_Nat" diff --git a/thy/Iptables_Semantics/Semantics_Stateful.thy b/thy/Iptables_Semantics/Semantics_Stateful.thy index f7f9e715..94e3d27c 100644 --- a/thy/Iptables_Semantics/Semantics_Stateful.thy +++ b/thy/Iptables_Semantics/Semantics_Stateful.thy @@ -36,15 +36,15 @@ inductive semantics_stateful :: semantics_stateful \ \\<^sub>\ state_update \\<^sub>0 (built_in_chain, default_policy) ps (ps_processed@[(p, X)]) (state_update \' X p)" -lemma semantics_stateful_intro_process_one: "semantics_stateful \ \\<^sub>\ state_upate \\<^sub>0 (built_in_chain, default_policy) (p#ps) ps_processed_old \_old \ +lemma semantics_stateful_intro_process_one: "semantics_stateful \ \\<^sub>\ state_update \\<^sub>0 (built_in_chain, default_policy) (p#ps) ps_processed_old \_old \ \,\\<^sub>\ \_old,p\ \[Rule MatchAny (Call built_in_chain), Rule MatchAny default_policy], Undecided\ \ Decision X \ - \' = state_upate \_old X p \ + \' = state_update \_old X p \ ps_processed = ps_processed_old@[(p, X)] \ - semantics_stateful \ \\<^sub>\ state_upate \\<^sub>0 (built_in_chain, default_policy) ps ps_processed \'" + semantics_stateful \ \\<^sub>\ state_update \\<^sub>0 (built_in_chain, default_policy) ps ps_processed \'" by(auto intro: semantics_stateful.intros) lemma semantics_stateful_intro_start: "\\<^sub>0 = \' \ ps_processed = [] \ - semantics_stateful \ \\<^sub>\ state_upate \\<^sub>0 (built_in_chain, default_policy) ps ps_processed \'" + semantics_stateful \ \\<^sub>\ state_update \\<^sub>0 (built_in_chain, default_policy) ps ps_processed \'" by(auto intro: semantics_stateful.intros) @@ -58,7 +58,7 @@ text\In this model, the matcher is completely stateless but packets are pr inductive semantics_stateful_packet_tagging :: "'a ruleset \ ('a, 'ptagged) matcher \ - ('\ \ 'p \ 'ptagged) \ (*taggs the packet accordig to the current state before processing by firewall*) + ('\ \ 'p \ 'ptagged) \ (*taggs the packet according to the current state before processing by firewall*) ('\ \ final_decision \ 'p \ '\) \ (*state updater*) '\ \ (*Starting state. 
constant*) (string \ action) \ @@ -74,15 +74,15 @@ inductive semantics_stateful_packet_tagging :: lemma semantics_stateful_packet_tagging_intro_start: "\\<^sub>0 = \' \ ps_processed = [] \ - semantics_stateful_packet_tagging \ \ packet_tagger state_upate \\<^sub>0 (built_in_chain, default_policy) ps ps_processed \'" + semantics_stateful_packet_tagging \ \ packet_tagger state_update \\<^sub>0 (built_in_chain, default_policy) ps ps_processed \'" by(auto intro: semantics_stateful_packet_tagging.intros) lemma semantics_stateful_packet_tagging_intro_process_one: - "semantics_stateful_packet_tagging \ \ packet_tagger state_upate \\<^sub>0 (built_in_chain, default_policy) (p#ps) ps_processed_old \_old \ + "semantics_stateful_packet_tagging \ \ packet_tagger state_update \\<^sub>0 (built_in_chain, default_policy) (p#ps) ps_processed_old \_old \ \,\,(packet_tagger \_old p)\ \[Rule MatchAny (Call built_in_chain), Rule MatchAny default_policy], Undecided\ \ Decision X \ - \' = state_upate \_old X p \ + \' = state_update \_old X p \ ps_processed = ps_processed_old@[(p, X)] \ - semantics_stateful_packet_tagging \ \ packet_tagger state_upate \\<^sub>0 (built_in_chain, default_policy) ps ps_processed \'" + semantics_stateful_packet_tagging \ \ packet_tagger state_update \\<^sub>0 (built_in_chain, default_policy) ps ps_processed \'" by(auto intro: semantics_stateful_packet_tagging.intros) diff --git a/thy/Iptables_Semantics/Semantics_Ternary/Matching_Ternary.thy b/thy/Iptables_Semantics/Semantics_Ternary/Matching_Ternary.thy index 35b2f9a1..d55e893a 100644 --- a/thy/Iptables_Semantics/Semantics_Ternary/Matching_Ternary.thy +++ b/thy/Iptables_Semantics/Semantics_Ternary/Matching_Ternary.thy @@ -379,7 +379,7 @@ begin qed lemma matcheq_matchNone: "\ has_primitive m \ matcheq_matchNone m \ \ matches \ m a p" - by(auto dest: matcheq_matchAny matachAny_matchNone) + by(auto dest: matcheq_matchAny matchAny_matchNone) lemma matcheq_matchNone_not_matches: "matcheq_matchNone m \ \ matches \ m a p" proof(induction m rule: matcheq_matchNone.induct) diff --git a/thy/Iptables_Semantics/Semantics_Ternary/Normalized_Matches.thy b/thy/Iptables_Semantics/Semantics_Ternary/Normalized_Matches.thy index d6b6bc99..26d25a6e 100644 --- a/thy/Iptables_Semantics/Semantics_Ternary/Normalized_Matches.thy +++ b/thy/Iptables_Semantics/Semantics_Ternary/Normalized_Matches.thy @@ -4,7 +4,7 @@ begin section\Normalized (DNF) matches\ -text\simplify a match expression. The output is a list of match exprissions, the semantics is @{text "\"} of the list elements.\ +text\simplify a match expression. The output is a list of match expressions, the semantics is @{text "\"} of the list elements.\ fun normalize_match :: "'a match_expr \ 'a match_expr list" where "normalize_match (MatchAny) = [MatchAny]" | "normalize_match (Match m) = [Match m]" | @@ -351,7 +351,7 @@ fun normalized_nnf_match :: "'a match_expr \ bool" where text\Essentially, @{term normalized_nnf_match} checks for a negation normal form: Only AND is at toplevel, negation only occurs in front of literals. Since @{typ "'a match_expr"} does not support OR, the result is in conjunction normal form. - Applying @{const normalize_match}, the reuslt is a list. Essentially, this is the disjunctive normal form.\ + Applying @{const normalize_match}, the result is a list. 
Essentially, this is the disjunctive normal form.\ lemma normalize_match_already_normalized: "normalized_nnf_match m \ normalize_match m = [m]" by(induction m rule: normalize_match.induct) (simp)+ diff --git a/thy/Iptables_Semantics/Semantics_Ternary/Primitive_Normalization.thy b/thy/Iptables_Semantics/Semantics_Ternary/Primitive_Normalization.thy index b8efa18e..25e45388 100644 --- a/thy/Iptables_Semantics/Semantics_Ternary/Primitive_Normalization.thy +++ b/thy/Iptables_Semantics/Semantics_Ternary/Primitive_Normalization.thy @@ -380,7 +380,7 @@ lemma primitive_extractor_reassemble_preserves: normalized_nnf_match m \ P m \ P MatchAny \ - primitive_extractor (disc, sel) m = (as, ms) \ (*turn eqality around to simplify proof*) + primitive_extractor (disc, sel) m = (as, ms) \ (*turn equality around to simplify proof*) (\m1 m2. P (MatchAnd m1 m2) \ P m1 \ P m2) \ (\ls1 ls2. P (alist_and' (ls1 @ ls2)) \ P (alist_and' ls1) \ P (alist_and' ls2)) \ P (alist_and' (NegPos_map C as))" @@ -866,7 +866,7 @@ subsection\Optimizing a match expression\ thm normalize_primitive_extract_preserves_unrelated_normalized_n_primitive (*is similar*) - lemma compress_normalize_primitve_preserves_normalized_n_primitive: + lemma compress_normalize_primitive_preserves_normalized_n_primitive: assumes am: "normalized_n_primitive (disc2, sel2) P m" and wf: "wf_disc_sel (disc,sel) C" and disc: "(\a. \ disc2 (C a))" diff --git a/thy/Iptables_Semantics/Semantics_Ternary/Semantics_Ternary.thy b/thy/Iptables_Semantics/Semantics_Ternary/Semantics_Ternary.thy index 32fafed9..206054fc 100644 --- a/thy/Iptables_Semantics/Semantics_Ternary/Semantics_Ternary.thy +++ b/thy/Iptables_Semantics/Semantics_Ternary/Semantics_Ternary.thy @@ -190,7 +190,7 @@ fun approximating_bigstep_fun :: "('a, 'p) match_tac \ 'p \ Decision FinalDeny | Log \ approximating_bigstep_fun \ p rs Undecided | Empty \ approximating_bigstep_fun \ p rs Undecided - (*unhalndled cases*) + (*unhandled cases*) )" diff --git a/thy/Iptables_Semantics/document/mathpartir.sty b/thy/Iptables_Semantics/document/mathpartir.sty index a3ac05cf..4d1a627b 100644 --- a/thy/Iptables_Semantics/document/mathpartir.sty +++ b/thy/Iptables_Semantics/document/mathpartir.sty @@ -46,7 +46,7 @@ % in mathpar or in inferrule \let \mpr@hva \empty -%% normal paragraph parametters, should rather be taken dynamically +%% normal paragraph parameters, should rather be taken dynamically \def \mpr@savepar {% \edef \MathparNormalpar {\noexpand \lineskiplimit \the\lineskiplimit @@ -335,7 +335,7 @@ % line of its conclusion % % The second form can be parameterized, using the key=val interface. 
The -% folloiwng keys are recognized: +% following keys are recognized: % % width set the width of the rule to val % narrower set the width of the rule to val\hsize @@ -430,7 +430,7 @@ %%% Exports -% Envirnonment mathpar +% Environment mathpar \let \inferrule \mpr@infer diff --git a/thy/LOFT/Examples/OF_conv_test/3chn.py b/thy/LOFT/Examples/OF_conv_test/3chn.py index ede76d9d..6e28cf60 100755 --- a/thy/LOFT/Examples/OF_conv_test/3chn.py +++ b/thy/LOFT/Examples/OF_conv_test/3chn.py @@ -130,9 +130,9 @@ def build(self): csl = self.addLink(client, switch, intfName2='s1-lan') nhsl = self.addLink(nhost, switch, intfName2='s1-wan') -def dump(ofile, strng): +def dump(ofile, text): with open(ofile, "w") as fo: - fo.write(strng.replace("\r\n","\n")) + fo.write(text.replace("\r\n","\n")) def tcpreachtest(net, client, server, port=80, timeout=2.5): output("TCP {}: {} -> {}: ".format(port, client, server)) @@ -164,7 +164,7 @@ def makearpentries(host, hosts): for intf in host.intfList(): if intf.MAC() and intf.IP(): # will also sort out loopback for host in hosts: - host.cmd("arp -s {} {}".format(intf.IP(), intf.MAC())) # will fail with Netwok unreachable at given times. Easier to ignore than fix. + host.cmd("arp -s {} {}".format(intf.IP(), intf.MAC())) # will fail with Network unreachable at given times. Easier to ignore than fix. def standalone(): if "info" in argv: diff --git a/thy/LOFT/Featherweight_OpenFlow_Comparison.thy b/thy/LOFT/Featherweight_OpenFlow_Comparison.thy index 4bfe4cb2..af3525f5 100644 --- a/thy/LOFT/Featherweight_OpenFlow_Comparison.thy +++ b/thy/LOFT/Featherweight_OpenFlow_Comparison.thy @@ -91,7 +91,7 @@ lemma guha_equal_hlp: apply(simp add: guha_equal_Action[OF no]) apply(simp add: guha_equal_NoAction[OF no]) apply(subgoal_tac False, simp) - apply(simp add: no no_overlaps_not_unefined) + apply(simp add: no no_overlaps_not_undefined) done lemma guha_deterministic1: "guha_table_semantics \ ft p (Some x1) \ \ guha_table_semantics \ ft p None" @@ -120,8 +120,8 @@ lemma guha_equal: using guha_deterministic1 apply fast using guha_deterministic2[OF no] apply blast using guha_deterministic1 apply fast - using no_overlaps_not_unefined[OF no] apply fastforce - using no_overlaps_not_unefined[OF no] apply fastforce + using no_overlaps_not_undefined[OF no] apply fastforce + using no_overlaps_not_undefined[OF no] apply fastforce done lemma guha_nondeterministicD: diff --git a/thy/LOFT/LinuxRouter_OpenFlow_Translation.thy b/thy/LOFT/LinuxRouter_OpenFlow_Translation.thy index 03f630e3..ee3bc54f 100644 --- a/thy/LOFT/LinuxRouter_OpenFlow_Translation.thy +++ b/thy/LOFT/LinuxRouter_OpenFlow_Translation.thy @@ -475,7 +475,7 @@ definition "no_oif_match \ list_all (\m. 
oiface (match_sel m) = i definition "lr_of_tran rt fw ifs \ if \ (no_oif_match fw \ has_default_policy fw \ simple_fw_valid fw \ valid_prefixes rt \ has_default_route rt \ distinct ifs) - then Inl ''Error in creating OpenFlow table: prerequisites not satisifed'' + then Inl ''Error in creating OpenFlow table: prerequisites not satisfied'' else ( let nrd = lr_of_tran_fbs rt fw ifs; ard = map (apfst of_nat) (annotate_rlen nrd) (* give them a priority *) diff --git a/thy/LOFT/OpenFlow_Documentation.thy b/thy/LOFT/OpenFlow_Documentation.thy index ea00a6b6..952048d1 100644 --- a/thy/LOFT/OpenFlow_Documentation.thy +++ b/thy/LOFT/OpenFlow_Documentation.thy @@ -211,9 +211,9 @@ schematic_goal "(field_match :: of_match_field) \ { L4Dst (?pd :: 16 word) (?md :: 16 word) }" by(fact of_match_field_typeset) text\ -Two things are worth additional mention: L3 and L4 ``addressess''. +Two things are worth additional mention: L3 and L4 ``addresses''. The @{term IPv4Src} and @{term IPv4Dst} matches are specified as ``can be subnet masked'' in~\cite{specification10}, - whereras~\cite{specification15} states clearly that arbitrary bitmasks can be used. We took the conservative approach here. + whereas~\cite{specification15} states clearly that arbitrary bitmasks can be used. We took the conservative approach here. Our alteration of @{term L4Src} and @{term L4Dst} is more grave. While~\cite{specification10} does not state anything about layer 4 ports and masks, \cite{specification15} specifically forbids using masks on them. Nevertheless, OpenVSwitch \cite{openvswitch} and some other implementations support them. @@ -228,7 +228,7 @@ Guha \emph{et al.} decided to use the fact that the preconditions can be arrange They evaluated match conditions in a manner following that graph: first, all field matches without preconditions are evaluated. Upon evaluating a field match (e.g., @{term "EtherType 0x0800"}), the matches that had their precondition fulfilled by it - (e.g., @{term IPv4Src} and @{term IPv4Src} in this example) are evalutated. + (e.g., @{term IPv4Src} and @{term IPv4Dst} in this example) are evaluated. This mirrors the faulty behavior of some implementations (see \cite{guha2013machine}). Adopting that behavior into our model would mean that any packet matches against the field match set @{term "{IPv4Dst (PrefixMatch 134744072 32)}"} instead of just those destined for 8.8.8.8 or causing an error. We found this to be unsatisfactory.\ @@ -314,9 +314,9 @@ We could have made this definition on sets but chose not to for consistency.}: The use of @{term Undefined} immediately raises the question in which condition it cannot occur. We give the following definition:\ lemma "check_no_overlap \ ft = (\a \ set ft. \b \ set ft. (a \ b \ ofe_prio a = ofe_prio b) \ \(\p. 
\ (ofe_fields a) p \ \ (ofe_fields b) p))" unfolding check_no_overlap_alt check_no_overlap2_def by force -text\Together with distinctness of the flow table, this provides the abscence of @{term Undefined}\footnote{It is slightly stronger than necessary, overlapping rules might be shadowed and thus never influence the behavior.}:\ +text\Together with distinctness of the flow table, this provides the absence of @{term Undefined}\footnote{It is slightly stronger than necessary, overlapping rules might be shadowed and thus never influence the behavior.}:\ lemma "\check_no_overlap \ ft; distinct ft\ \ - OF_priority_match \ ft p \ Undefined" by (simp add: no_overlapsI no_overlaps_not_unefined) + OF_priority_match \ ft p \ Undefined" by (simp add: no_overlapsI no_overlaps_not_undefined) text\Given the absence of overlapping or duplicate flow entries, we can show two interesting equivalences. the first is the equality to the semantics defined by Guha \emph{et al.}:\ @@ -396,7 +396,7 @@ text_raw\ \ lemma "lr_of_tran rt fw ifs \ if \ (no_oif_match fw \ has_default_policy fw \ simple_fw_valid fw \ valid_prefixes rt \ has_default_route rt \ distinct ifs) - then Inl ''Error in creating OpenFlow table: prerequisites not satisifed'' + then Inl ''Error in creating OpenFlow table: prerequisites not satisfied'' else ( let nfw = map simple_rule_dtor fw; @@ -501,7 +501,7 @@ Obviously, we will never see any packets with an input interface that is not in Furthermore, we do not state anything about non-IPv4 traffic. (The traffic will remain unmatched in by the flow table, but we have not verified that.) The last assumption is that the translation does not return a run-time error. The translation will return a run-time error if the rules can not be assigned priorities from a 16 bit integer, -or when one of the following conditions on the input data is not satisifed:\ +or when one of the following conditions on the input data is not satisfied:\ lemma " \ no_oif_match fw \ \ has_default_policy fw \ diff --git a/thy/LOFT/OpenFlow_Matches.thy b/thy/LOFT/OpenFlow_Matches.thy index 71c04369..bb48b6b8 100644 --- a/thy/LOFT/OpenFlow_Matches.thy +++ b/thy/LOFT/OpenFlow_Matches.thy @@ -57,7 +57,7 @@ For example: try with oxm_type=OXM_OF_ETH_TYPE, oxm_hasmask=0, and oxm_value=0x0800. That is, match- ing on the IPv4 source address is allowed only if the Ethernet type is explicitly set to IPv4. \ -Even if OpenFlow 1.0 does not require this behavior, some switches may still silently drop matches without prerequsites. +Even if OpenFlow 1.0 does not require this behavior, some switches may still silently drop matches without prerequisites. *) @@ -80,11 +80,11 @@ function prerequisites :: "of_match_field \ of_match_field set \ m \ prerequisites v m)" | (* OF_IPV4_DST ETH_TYPE=0x0800 *) "prerequisites (IPv4Dst _) m = (let v = EtherType 0x0800 in v \ m \ prerequisites v m)" | -(* Now here goes a bit of fuzz: OF specifies differen OXM_OF_(TCP,UDP,L4_Protocol.SCTP,\)_(SRC,DST). I only have L4Src. So gotta make do with that. *) +(* Now here goes a bit of fuzz: OF specifies different OXM_OF_(TCP,UDP,L4_Protocol.SCTP,\)_(SRC,DST). I only have L4Src. So gotta make do with that. *) "prerequisites (L4Src _ _) m = (\proto \ {TCP,UDP,L4_Protocol.SCTP}. 
let v = IPv4Proto proto in v \ m \ prerequisites v m)" | "prerequisites (L4Dst _ _) m = prerequisites (L4Src undefined undefined) m" by pat_completeness auto -(* Ignoredd PACKET_TYPE=foo *) +(* Ignored PACKET_TYPE=foo *) fun match_sorter :: "of_match_field \ nat" where "match_sorter (IngressPort _) = 1" | diff --git a/thy/LOFT/Semantics_OpenFlow.thy b/thy/LOFT/Semantics_OpenFlow.thy index bc9e4a9c..c7dd4945 100644 --- a/thy/LOFT/Semantics_OpenFlow.thy +++ b/thy/LOFT/Semantics_OpenFlow.thy @@ -37,7 +37,7 @@ datatype ('m, 'a) flow_entry_match = OFEntry (ofe_prio: "16 word") (ofe_fields: find_consts "(('a \ 'b) \ 'c) \ 'a \ 'b \ 'c" (* but no "uncurry" *) find_consts "('a \ 'b \ 'c) \ ('a \ 'b) \ 'c" -(* Anyway, we want this to easily construct OFEntrys from tuples *) +(* Anyway, we want this to easily construct OFEntry-s from tuples *) definition "split3 f p \ case p of (a,b,c) \ f a b c" find_consts "('a \ 'b \ 'c \ 'd) \ ('a \ 'b \ 'c) \ 'd" @@ -60,7 +60,7 @@ definition OF_same_priority_match2 :: "('m, 'p) field_matcher \ ('m, | (Suc 0) \ Action (the_elem s) | _ \ Undefined" -(* are there any overlaping rules? *) +(* are there any overlapping rules? *) definition "check_no_overlap \ ft = (\a \ set ft. \b \ set ft. \p \ UNIV. (ofe_prio a = ofe_prio b \ \ (ofe_fields a) p \ a \ b) \ \\ (ofe_fields b) p)" definition "check_no_overlap2 \ ft = (\a \ set ft. \b \ set ft. (a \ b \ ofe_prio a = ofe_prio b) \ \(\p \ UNIV. \ (ofe_fields a) p \ \ (ofe_fields b) p))" lemma check_no_overlap_alt: "check_no_overlap \ ft = check_no_overlap2 \ ft" @@ -68,7 +68,7 @@ lemma check_no_overlap_alt: "check_no_overlap \ ft = check_no_overlap2 \< by blast (* If there are no overlapping rules, our match should check out. *) -lemma no_overlap_not_unefined: "check_no_overlap \ ft \ OF_same_priority_match2 \ ft p \ Undefined" +lemma no_overlap_not_undefined: "check_no_overlap \ ft \ OF_same_priority_match2 \ ft p \ Undefined" proof assume goal1: "check_no_overlap \ ft" "OF_same_priority_match2 \ ft p = Undefined" let ?as = "{f. f \ set ft \ \ (ofe_fields f) p \ (\fo \ set ft. ofe_prio f < ofe_prio fo \ \ \ (ofe_fields fo) p)}" @@ -363,7 +363,7 @@ lemma OF_spm3_noa_none: unfolding OF_eq_sort[OF no] by(drule OF_lm_noa_none) simp (* repetition of the lemma for definition 2 for definition 3 *) -lemma no_overlaps_not_unefined: "no_overlaps \ ft \ OF_priority_match \ ft p \ Undefined" - using check_no_overlapI no_overlap_not_unefined no_overlaps_defeq by fastforce +lemma no_overlaps_not_undefined: "no_overlaps \ ft \ OF_priority_match \ ft p \ Undefined" + using check_no_overlapI no_overlap_not_undefined no_overlaps_defeq by fastforce end diff --git a/thy/LOFT/document/chap3.tex b/thy/LOFT/document/chap3.tex index f3289b41..c556d94e 100644 --- a/thy/LOFT/document/chap3.tex +++ b/thy/LOFT/document/chap3.tex @@ -1,7 +1,7 @@ \section{Evaluation}\label{sec:eval} In Section~\ref{sec:conv}, we have made lots of definitions and created lots of models. How far these models are in accordance with the real world has been up to the vigilance of the reader. -This section attemts to leviate this burden by providing some examples. +This section attempts to alleviate this burden by providing some examples. 
\subsection{Mininet Examples} \label{sec:mnex} @@ -148,7 +148,7 @@ \subsection{Performance Evaluation} \item Some combinations of matches from the firewall and the routing table cannot be ruled out, since the firewall match might only contain an output port and the rule can thus only apply for the packets matching a few routing table entries. However, the translation is not aware of that and can thus not remove the combination of the firewall rule and other routing table entries. \end{itemize} -In some rules, the conditions above coincede, resulting in $416\ (=16 \cdot 26)$ rules. +In some rules, the conditions above coincide, resulting in $416\ (=16 \cdot 26)$ rules. To avoid the high number of rules resulting from the port matches, rules that forbids packets with source or destination port 0 could be added to the start of the firewall and the $1$-$65535$ could be removed; dealing with the firewall / routing table problem is part of the future work on output interfaces. diff --git a/thy/LOFT/document/moeptikz.sty b/thy/LOFT/document/moeptikz.sty index 02be8479..117f3bfc 100644 --- a/thy/LOFT/document/moeptikz.sty +++ b/thy/LOFT/document/moeptikz.sty @@ -1517,7 +1517,7 @@ messageclosed/.default={1cm,black!30,black} } -% Redfined checkerboard decoration with better image quality than the default +% Redefined checkerboard decoration with better image quality than the default \pgfdeclarepatternformonly[\CheckerSize]{moepcheckerboard}% name {\pgfqpoint{0mm}{0mm}}% origin {\pgfqpoint{2\CheckerSize}{2\CheckerSize}}% top right diff --git a/thy/Native_Word/Uint_Userguide.thy b/thy/Native_Word/Uint_Userguide.thy index e3aa53b7..4fa1703e 100644 --- a/thy/Native_Word/Uint_Userguide.thy +++ b/thy/Native_Word/Uint_Userguide.thy @@ -155,7 +155,7 @@ text {* subsection {* Example: expressions and two semantics *} text {* - As the running example, we consider a language of expressions (literal values, less-than comparisions and conditional) where values are either booleans or 32-bit words. + As the running example, we consider a language of expressions (literal values, less-than comparisons and conditional) where values are either booleans or 32-bit words. The original specification uses the type @{typ "32 word"}. *} @@ -266,7 +266,7 @@ text {* semantics (code equations @{thm [source] eval.simps} and @{thm [source] step.equation}, respectively). - We check that the adaptation has suceeded by exporting the functions. + We check that the adaptation has succeeded by exporting the functions. As we only use native word sizes that PolyML supports, we can use the usual target @{text "SML"} instead of @{text "SML_word"}. 
*} diff --git a/thy/Routing/Routing_Table.thy b/thy/Routing/Routing_Table.thy index 0af0a92d..cec4c2f2 100644 --- a/thy/Routing/Routing_Table.thy +++ b/thy/Routing/Routing_Table.thy @@ -164,12 +164,12 @@ proof(induction rtbl) qed qed(simp add: unambiguous_routing_def) -lemma unambigous_prefix_routing_weak_mono: +lemma unambiguous_prefix_routing_weak_mono: assumes lpfx: "is_longest_prefix_routing (rr#rtbl)" assumes e:"rr' \ set rtbl" shows "routing_rule_sort_key rr' \ routing_rule_sort_key rr" using assms by(simp add: linorder_class.sorted_Cons is_longest_prefix_routing_def) -lemma unambigous_prefix_routing_strong_mono: +lemma unambiguous_prefix_routing_strong_mono: assumes lpfx: "is_longest_prefix_routing (rr#rtbl)" assumes uam: "unambiguous_routing (rr#rtbl)" assumes e:"rr' \ set rtbl" @@ -177,7 +177,7 @@ lemma unambigous_prefix_routing_strong_mono: shows "routing_rule_sort_key rr' > routing_rule_sort_key rr" proof - from uam e ne have "routing_rule_sort_key rr \ routing_rule_sort_key rr'" by(fastforce simp add: unambiguous_routing_def) - moreover from unambigous_prefix_routing_weak_mono lpfx e have "routing_rule_sort_key rr \ routing_rule_sort_key rr'" . + moreover from unambiguous_prefix_routing_weak_mono lpfx e have "routing_rule_sort_key rr \ routing_rule_sort_key rr'" . ultimately show ?thesis by simp qed @@ -222,7 +222,7 @@ next from C have e: "rr' \ set rtbl" using rr' by simp show False proof cases assume eq: "routing_match rr' = routing_match rr" - with e have "routing_rule_sort_key rr < routing_rule_sort_key rr'" using unambigous_prefix_routing_strong_mono[OF Cons.prems(2,4) _ eq] by simp + with e have "routing_rule_sort_key rr < routing_rule_sort_key rr'" using unambiguous_prefix_routing_strong_mono[OF Cons.prems(2,4) _ eq] by simp with True rr' show False by simp next assume ne: "routing_match rr' \ routing_match rr" @@ -230,7 +230,7 @@ next note same_length_prefixes_distinct[OF this(1,2) ne[symmetric] _ True this(3)] moreover have "routing_prefix rr = routing_prefix rr'" (is ?pe) proof - have "routing_rule_sort_key rr < routing_rule_sort_key rr' \ \ prefix_match_semantics (routing_match rr) addr" using rr' by simp - with unambigous_prefix_routing_weak_mono[OF Cons.prems(2) e] True have "routing_rule_sort_key rr = routing_rule_sort_key rr'" by simp + with unambiguous_prefix_routing_weak_mono[OF Cons.prems(2) e] True have "routing_rule_sort_key rr = routing_rule_sort_key rr'" by simp thus ?pe by(simp add: routing_rule_sort_key_def int_of_nat_def) qed ultimately show False . @@ -440,7 +440,7 @@ proof(rule ccontr) from x have "\b2g. 
x \ wordinterval_to_set b2g \ wordinterval_to_set b2g \ wordinterval_to_set b2 \ (a2, b2g) \ set (routing_port_ranges tbl wordinterval_UNIV)" using iuf(2) by(fastforce simp add: wordinterval_Union) then obtain b2g where b2g: "x \ wordinterval_to_set b2g" "wordinterval_to_set b2g \ wordinterval_to_set b2" "(a2, b2g) \ set (routing_port_ranges tbl wordinterval_UNIV)" by clarsimp - text\Soudness tells us that the both @{term a1} and @{term a2} have to be the result of routing @{term x}.\ + text\Soundness tells us that both @{term a1} and @{term a2} have to be the result of routing @{term x}.\ note routing_port_ranges_sound[OF b1g(3), unfolded fst_conv snd_conv, OF b1g(1) vpfx] routing_port_ranges_sound[OF b2g(3), unfolded fst_conv snd_conv, OF b2g(1) vpfx] text\A contradiction follows from @{thm dif}.\ with dif show False by simp diff --git a/thy/Simple_Firewall/Common/IP_Partition_Preliminaries.thy b/thy/Simple_Firewall/Common/IP_Partition_Preliminaries.thy index 445239c9..475fbf50 100644 --- a/thy/Simple_Firewall/Common/IP_Partition_Preliminaries.thy +++ b/thy/Simple_Firewall/Common/IP_Partition_Preliminaries.thy @@ -76,7 +76,7 @@ context begin private lemma coversallAddSubset:"\ (insert a ts) = \ (addSubsetSet a ts)" by (auto simp add: addSubsetSet_def) - private lemma ipPartioningAddSubset0: "disjoint ts \ ipPartition ts (addSubsetSet a ts)" + private lemma ipPartitioningAddSubset0: "disjoint ts \ ipPartition ts (addSubsetSet a ts)" apply(simp add: addSubsetSet_def ipPartition_def) apply(safe) apply blast @@ -175,7 +175,7 @@ context begin case Nil thus ?case by(simp add: ipPartition_def) next case (Cons t ts) - from Cons.prems ipPartioningAddSubset0 have d: "ipPartition As (addSubsetSet t As)" by blast + from Cons.prems ipPartitioningAddSubset0 have d: "ipPartition As (addSubsetSet t As)" by blast from Cons.prems Cons.IH d disjointAddSubset ipPartitioningAddSubset1 have e: "ipPartition (set ts) (partitioning ts (addSubsetSet t As))" by blast from ipPartitioningAddSubset2 Cons.prems diff --git a/thy/Simple_Firewall/Primitives/Iface.thy b/thy/Simple_Firewall/Primitives/Iface.thy index 05a2fe23..1e38f0d4 100644 --- a/thy/Simple_Firewall/Primitives/Iface.thy +++ b/thy/Simple_Firewall/Primitives/Iface.thy @@ -524,7 +524,7 @@ begin apply(simp_all) done - text\Non-wildacrd interfaces of length @{term n}\ + text\Non-wildcard interfaces of length @{term n}\ private definition non_wildcard_ifaces :: "nat \ string list" where "non_wildcard_ifaces n \ filter (\i. \ iface_name_is_wildcard i) (List.n_lists n all_chars)" @@ -537,7 +537,7 @@ begin private lemma "(\ i \ set (non_wildcard_ifaces n). internal_iface_name_to_set i) = {s::string. length s = n \ \ iface_name_is_wildcard s}" by(simp add: non_wildcard_ifaces) - text\Non-wildacrd interfaces up to length @{term n}\ + text\Non-wildcard interfaces up to length @{term n}\ private fun non_wildcard_ifaces_upto :: "nat \ string list" where "non_wildcard_ifaces_upto 0 = [[]]" | "non_wildcard_ifaces_upto (Suc n) = (non_wildcard_ifaces (Suc n)) @ non_wildcard_ifaces_upto n" diff --git a/thy/Simple_Firewall/Service_Matrix.thy b/thy/Simple_Firewall/Service_Matrix.thy index 97960335..353c555d 100644 --- a/thy/Simple_Firewall/Service_Matrix.thy +++ b/thy/Simple_Firewall/Service_Matrix.thy @@ -1428,7 +1428,7 @@ the edges are the edges. Result looks nice. 
Theorem also tells us that this visu vertices = (name, list of ip addresses this vertex corresponds to) and edges = (name \ name) list - Note that the WordInterval is already sorted, which is important for prettyness! + Note that the WordInterval is already sorted, which is important for prettiness! *) text\Only defined for @{const simple_firewall_without_interfaces}\ definition access_matrix_pretty_ipv4 diff --git a/thy/Word_Lib/Word_Lemmas.thy b/thy/Word_Lib/Word_Lemmas.thy index b715bf52..1a1e0430 100644 --- a/thy/Word_Lib/Word_Lemmas.thy +++ b/thy/Word_Lib/Word_Lemmas.thy @@ -3007,7 +3007,7 @@ lemma word_le_plus:"\(a::('a::len) word) < a + b; c < b\ \ len_of TYPE('s) \ len_of TYPE('s) \ len_of TYPE('l) \ (of_bl:: bool list \ 'l::len word) (to_bl ((of_bl:: bool list \ 's::len word) (to_bl w))) = w" @@ -4655,12 +4655,12 @@ lemma bl_cast_long_short_long_ingoreLeadingZero_generic: For example: 'l::len word is 128 word (full ipv6 address) 's::len word is 16 word (address piece of ipv6 address in colon-text-representation) *) -corollary ucast_short_ucast_long_ingoreLeadingZero: +corollary ucast_short_ucast_long_ignoreLeadingZero: "length (dropWhile Not (to_bl w)) \ len_of TYPE('s) \ len_of TYPE('s) \ len_of TYPE('l) \ (ucast:: 's::len word \ 'l::len word) ((ucast:: 'l::len word \ 's::len word) w) = w" apply(subst Word.ucast_bl)+ - apply(rule bl_cast_long_short_long_ingoreLeadingZero_generic) + apply(rule bl_cast_long_short_long_ignoreLeadingZero_generic) apply(simp_all) done diff --git a/thy/Word_Lib/Word_Lemmas_32.thy b/thy/Word_Lib/Word_Lemmas_32.thy index b7f6414a..d4750b34 100644 --- a/thy/Word_Lib/Word_Lemmas_32.thy +++ b/thy/Word_Lib/Word_Lemmas_32.thy @@ -290,7 +290,7 @@ lemma cast_chunk_assemble_id_64'[simp]: "(((ucast ((scast (x::64 word))::32 word))::64 word) || (((ucast ((scast (x >> 32))::32 word))::64 word) << 32)) = x" by (simp add:cast_chunk_scast_assemble_id) -(* Specialiasations of down_cast_same for adding to local simpsets. *) +(* Specialisations of down_cast_same for adding to local simpsets. *) lemma cast_down_u64: "(scast::64 word \ 32 word) = (ucast::64 word \ 32 word)" apply (subst down_cast_same[symmetric]) apply (simp add:is_down)+ diff --git a/thy/Word_Lib/Word_Lemmas_64.thy b/thy/Word_Lib/Word_Lemmas_64.thy index e5394a75..c1c66d7a 100644 --- a/thy/Word_Lib/Word_Lemmas_64.thy +++ b/thy/Word_Lib/Word_Lemmas_64.thy @@ -267,7 +267,7 @@ lemma cast_chunk_assemble_id_64'[simp]: "(((ucast ((scast (x::64 word))::32 word))::64 word) || (((ucast ((scast (x >> 32))::32 word))::64 word) << 32)) = x" by (simp add:cast_chunk_scast_assemble_id) -(* Specialiasations of down_cast_same for adding to local simpsets. *) +(* Specialisations of down_cast_same for adding to local simpsets. *) lemma cast_down_u64: "(scast::64 word \ 32 word) = (ucast::64 word \ 32 word)" apply (subst down_cast_same[symmetric]) apply (simp add:is_down)+