entropy-mnemonics-master/.gitignore

*.swp
*.out
*.html

entropy-mnemonics-master/.travis.yml

language: go
go:
  - 1.8
install:
  - go get -u github.com/golang/lint/golint
  - go get -u golang.org/x/tools/cmd/cover
  - go get -u golang.org/x/text/unicode/norm
  - test -z "$(go fmt)"
  - test -z "$(golint)"
  - test -z "$(go vet)"
script: go test -v -bench=. && go test -covermode=atomic && go install
sudo: false

entropy-mnemonics-master/LICENSE

The MIT License (MIT)

Copyright (c) 2015 Nebulous

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

entropy-mnemonics-master/README.md

# entropy-mnemonics

[![Build Status](https://travis-ci.org/NebulousLabs/entropy-mnemonics.svg?branch=master)](https://travis-ci.org/NebulousLabs/entropy-mnemonics) [![Documentation](https://img.shields.io/badge/docs-godoc-blue.svg)](https://godoc.org/github.com/NebulousLabs/entropy-mnemonics)

mnemonics is a Go package that converts byte slices into human-friendly phrases. Its primary purpose is to assist with the generation of cryptographically secure passwords. The threshold for a cryptographically secure password is between 128 and 256 bits of entropy, which, when converted to base64, is 22-43 random characters. Random characters are both difficult to remember and subject to error when spoken or written down; slurring or sloppy handwriting can make it difficult to recover a password. These considerations may seem strange to those who use password managers; why write down the password at all? The answer is healthy paranoia: retaining a physical copy of a password protects the user from disk failure and malware.

mnemonics solves these problems by converting byte slices into human-intelligible phrases. Take the following 128-bit example:

```
Hex:      a26a4821e36c7f7dccaa5484c080cefa
Base64:   ompIIeNsf33MqlSEwIDO+g==
Mnemonic: austere sniff aching hiding pact damp focus tacit timber pram left wonders
```

Though more verbose, the mnemonic phrase is less prone to human transcription errors. The words are chosen from a dictionary of size 1626, such that a 12-word phrase corresponds to almost exactly 128 bits of entropy. Note that only the first few characters of each word matter when decoding; for the English dictionary, 3 characters are sufficient. This means that passphrases can be altered to make them more understandable or more easily memorized. For example, the phrase "austere sniff aching" could be changed to "austere sniff achoo" and the phrase would still decode correctly.

Full UTF-8 support is available for dictionaries, including input normalization for inputs with [canonical equivalence](https://en.wikipedia.org/wiki/Unicode_equivalence).

### Supported Dictionaries ###

+ English (prefix size: 3)
+ German (prefix size: 4)
+ Japanese (prefix size: 3)

entropy-mnemonics-master/english.go

package mnemonics

// The english dictionary was pulled from the Monero project, license included
// below.

// Copyright (c) 2014-2015, The Monero Project
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification, are
// permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of
// conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list
// of conditions and the following disclaimer in the documentation and/or other
// materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be
// used to endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

const (
	// English is an id pointing to the standard password dictionary for
	// english users.
	English DictionaryID = "english"

	// EnglishUniquePrefixLen indicates the number of letters needed to achieve
	// unique prefixes throughout the english dictionary. No two words in the
	// dictionary have the same prefix of len EnglishUniquePrefixLen.
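	// For example, "abbey" may be written as "abbot" (or anything else
	// beginning with "abb") and a phrase containing it will still decode to
	// the same bytes, because decoding only considers the first
	// EnglishUniquePrefixLen runes of each word.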
EnglishUniquePrefixLen = 3 ) var ( englishDictionary = Dictionary{ "abbey", "abducts", "ability", "ablaze", "abnormal", "abort", "abrasive", "absorb", "abyss", "academy", "aces", "aching", "acidic", "acoustic", "acquire", "across", "actress", "acumen", "adapt", "addicted", "adept", "adhesive", "adjust", "adopt", "adrenalin", "adult", "adventure", "aerial", "afar", "affair", "afield", "afloat", "afoot", "afraid", "after", "against", "agenda", "aggravate", "agile", "aglow", "agnostic", "agony", "agreed", "ahead", "aided", "ailments", "aimless", "airport", "aisle", "ajar", "akin", "alarms", "album", "alchemy", "alerts", "algebra", "alkaline", "alley", "almost", "aloof", "alpine", "already", "also", "altitude", "alumni", "always", "amaze", "ambush", "amended", "amidst", "ammo", "amnesty", "among", "amply", "amused", "anchor", "android", "anecdote", "angled", "ankle", "annoyed", "answers", "antics", "anvil", "anxiety", "anybody", "apart", "apex", "aphid", "aplomb", "apology", "apply", "apricot", "aptitude", "aquarium", "arbitrary", "archer", "ardent", "arena", "argue", "arises", "army", "around", "arrow", "arsenic", "artistic", "ascend", "ashtray", "aside", "asked", "asleep", "aspire", "assorted", "asylum", "athlete", "atlas", "atom", "atrium", "attire", "auburn", "auctions", "audio", "august", "aunt", "austere", "autumn", "avatar", "avidly", "avoid", "awakened", "awesome", "awful", "awkward", "awning", "awoken", "axes", "axis", "axle", "aztec", "azure", "baby", "bacon", "badge", "baffles", "bagpipe", "bailed", "bakery", "balding", "bamboo", "banjo", "baptism", "basin", "batch", "bawled", "bays", "because", "beer", "befit", "begun", "behind", "being", "below", "bemused", "benches", "berries", "bested", "betting", "bevel", "beware", "beyond", "bias", "bicycle", "bids", "bifocals", "biggest", "bikini", "bimonthly", "binocular", "biology", "biplane", "birth", "biscuit", "bite", "biweekly", "blender", "blip", "bluntly", "boat", "bobsled", "bodies", "bogeys", "boil", "boldly", "bomb", "border", "boss", "both", "bounced", "bovine", "bowling", "boxes", "boyfriend", "broken", "brunt", "bubble", "buckets", "budget", "buffet", "bugs", "building", "bulb", "bumper", "bunch", "business", "butter", "buying", "buzzer", "bygones", "byline", "bypass", "cabin", "cactus", "cadets", "cafe", "cage", "cajun", "cake", "calamity", "camp", "candy", "casket", "catch", "cause", "cavernous", "cease", "cedar", "ceiling", "cell", "cement", "cent", "certain", "chlorine", "chrome", "cider", "cigar", "cinema", "circle", "cistern", "citadel", "civilian", "claim", "click", "clue", "coal", "cobra", "cocoa", "code", "coexist", "coffee", "cogs", "cohesive", "coils", "colony", "comb", "cool", "copy", "corrode", "costume", "cottage", "cousin", "cowl", "criminal", "cube", "cucumber", "cuddled", "cuffs", "cuisine", "cunning", "cupcake", "custom", "cycling", "cylinder", "cynical", "dabbing", "dads", "daft", "dagger", "daily", "damp", "dangerous", "dapper", "darted", "dash", "dating", "dauntless", "dawn", "daytime", "dazed", "debut", "decay", "dedicated", "deepest", "deftly", "degrees", "dehydrate", "deity", "dejected", "delayed", "demonstrate", "dented", "deodorant", "depth", "desk", "devoid", "dewdrop", "dexterity", "dialect", "dice", "diet", "different", "digit", "dilute", "dime", "dinner", "diode", "diplomat", "directed", "distance", "ditch", "divers", "dizzy", "doctor", "dodge", "does", "dogs", "doing", "dolphin", "domestic", "donuts", "doorway", "dormant", "dosage", "dotted", "double", "dove", "down", "dozen", "dreams", "drinks", 
"drowning", "drunk", "drying", "dual", "dubbed", "duckling", "dude", "duets", "duke", "dullness", "dummy", "dunes", "duplex", "duration", "dusted", "duties", "dwarf", "dwelt", "dwindling", "dying", "dynamite", "dyslexic", "each", "eagle", "earth", "easy", "eating", "eavesdrop", "eccentric", "echo", "eclipse", "economics", "ecstatic", "eden", "edgy", "edited", "educated", "eels", "efficient", "eggs", "egotistic", "eight", "either", "eject", "elapse", "elbow", "eldest", "eleven", "elite", "elope", "else", "eluded", "emails", "ember", "emerge", "emit", "emotion", "empty", "emulate", "energy", "enforce", "enhanced", "enigma", "enjoy", "enlist", "enmity", "enough", "enraged", "ensign", "entrance", "envy", "epoxy", "equip", "erase", "erected", "erosion", "error", "eskimos", "espionage", "essential", "estate", "etched", "eternal", "ethics", "etiquette", "evaluate", "evenings", "evicted", "evolved", "examine", "excess", "exhale", "exit", "exotic", "exquisite", "extra", "exult", "fabrics", "factual", "fading", "fainted", "faked", "fall", "family", "fancy", "farming", "fatal", "faulty", "fawns", "faxed", "fazed", "feast", "february", "federal", "feel", "feline", "females", "fences", "ferry", "festival", "fetches", "fever", "fewest", "fiat", "fibula", "fictional", "fidget", "fierce", "fifteen", "fight", "films", "firm", "fishing", "fitting", "five", "fixate", "fizzle", "fleet", "flippant", "flying", "foamy", "focus", "foes", "foggy", "foiled", "folding", "fonts", "foolish", "fossil", "fountain", "fowls", "foxes", "foyer", "framed", "friendly", "frown", "fruit", "frying", "fudge", "fuel", "fugitive", "fully", "fuming", "fungal", "furnished", "fuselage", "future", "fuzzy", "gables", "gadget", "gags", "gained", "galaxy", "gambit", "gang", "gasp", "gather", "gauze", "gave", "gawk", "gaze", "gearbox", "gecko", "geek", "gels", "gemstone", "general", "geometry", "germs", "gesture", "getting", "geyser", "ghetto", "ghost", "giant", "giddy", "gifts", "gigantic", "gills", "gimmick", "ginger", "girth", "giving", "glass", "gleeful", "glide", "gnaw", "gnome", "goat", "goblet", "godfather", "goes", "goggles", "going", "goldfish", "gone", "goodbye", "gopher", "gorilla", "gossip", "gotten", "gourmet", "governing", "gown", "greater", "grunt", "guarded", "guest", "guide", "gulp", "gumball", "guru", "gusts", "gutter", "guys", "gymnast", "gypsy", "gyrate", "habitat", "hacksaw", "haggled", "hairy", "hamburger", "happens", "hashing", "hatchet", "haunted", "having", "hawk", "haystack", "hazard", "hectare", "hedgehog", "heels", "hefty", "height", "hemlock", "hence", "heron", "hesitate", "hexagon", "hickory", "hiding", "highway", "hijack", "hiker", "hills", "himself", "hinder", "hippo", "hire", "history", "hitched", "hive", "hoax", "hobby", "hockey", "hoisting", "hold", "honked", "hookup", "hope", "hornet", "hospital", "hotel", "hounded", "hover", "howls", "hubcaps", "huddle", "huge", "hull", "humid", "hunter", "hurried", "husband", "huts", "hybrid", "hydrogen", "hyper", "iceberg", "icing", "icon", "identity", "idiom", "idled", "idols", "igloo", "ignore", "iguana", "illness", "imagine", "imbalance", "imitate", "impel", "inactive", "inbound", "incur", "industrial", "inexact", "inflamed", "ingested", "initiate", "injury", "inkling", "inline", "inmate", "innocent", "inorganic", "input", "inquest", "inroads", "insult", "intended", "inundate", "invoke", "inwardly", "ionic", "irate", "iris", "irony", "irritate", "island", "isolated", "issued", "italics", "itches", "items", "itinerary", "itself", "ivory", "jabbed", "jackets", 
"jaded", "jagged", "jailed", "jamming", "january", "jargon", "jaunt", "javelin", "jaws", "jazz", "jeans", "jeers", "jellyfish", "jeopardy", "jerseys", "jester", "jetting", "jewels", "jigsaw", "jingle", "jittery", "jive", "jobs", "jockey", "jogger", "joining", "joking", "jolted", "jostle", "journal", "joyous", "jubilee", "judge", "juggled", "juicy", "jukebox", "july", "jump", "junk", "jury", "justice", "juvenile", "kangaroo", "karate", "keep", "kennel", "kept", "kernels", "kettle", "keyboard", "kickoff", "kidneys", "king", "kiosk", "kisses", "kitchens", "kiwi", "knapsack", "knee", "knife", "knowledge", "knuckle", "koala", "laboratory", "ladder", "lagoon", "lair", "lakes", "lamb", "language", "laptop", "large", "last", "later", "launching", "lava", "lawsuit", "layout", "lazy", "lectures", "ledge", "leech", "left", "legion", "leisure", "lemon", "lending", "leopard", "lesson", "lettuce", "lexicon", "liar", "library", "licks", "lids", "lied", "lifestyle", "light", "likewise", "lilac", "limits", "linen", "lion", "lipstick", "liquid", "listen", "lively", "loaded", "lobster", "locker", "lodge", "lofty", "logic", "loincloth", "long", "looking", "lopped", "lordship", "losing", "lottery", "loudly", "love", "lower", "loyal", "lucky", "luggage", "lukewarm", "lullaby", "lumber", "lunar", "lurk", "lush", "luxury", "lymph", "lynx", "lyrics", "macro", "madness", "magically", "mailed", "major", "makeup", "malady", "mammal", "maps", "masterful", "match", "maul", "maverick", "maximum", "mayor", "maze", "meant", "mechanic", "medicate", "meeting", "megabyte", "melting", "memoir", "menu", "merger", "mesh", "metro", "mews", "mice", "midst", "mighty", "mime", "mirror", "misery", "mittens", "mixture", "moat", "mobile", "mocked", "mohawk", "moisture", "molten", "moment", "money", "moon", "mops", "morsel", "mostly", "motherly", "mouth", "movement", "mowing", "much", "muddy", "muffin", "mugged", "mullet", "mumble", "mundane", "muppet", "mural", "musical", "muzzle", "myriad", "mystery", "myth", "nabbing", "nagged", "nail", "names", "nanny", "napkin", "narrate", "nasty", "natural", "nautical", "navy", "nearby", "necklace", "needed", "negative", "neither", "neon", "nephew", "nerves", "nestle", "network", "neutral", "never", "newt", "nexus", "nibs", "niche", "niece", "nifty", "nightly", "nimbly", "nineteen", "nirvana", "nitrogen", "nobody", "nocturnal", "nodes", "noises", "nomad", "noodles", "northern", "nostril", "noted", "nouns", "novelty", "nowhere", "nozzle", "nuance", "nucleus", "nudged", "nugget", "nuisance", "null", "number", "nuns", "nurse", "nutshell", "nylon", "oaks", "oars", "oasis", "oatmeal", "obedient", "object", "obliged", "obnoxious", "observant", "obtains", "obvious", "occur", "ocean", "october", "odds", "odometer", "offend", "often", "oilfield", "ointment", "okay", "older", "olive", "olympics", "omega", "omission", "omnibus", "onboard", "oncoming", "oneself", "ongoing", "onion", "online", "onslaught", "onto", "onward", "oozed", "opacity", "opened", "opposite", "optical", "opus", "orange", "orbit", "orchid", "orders", "organs", "origin", "ornament", "orphans", "oscar", "ostrich", "otherwise", "otter", "ouch", "ought", "ounce", "ourselves", "oust", "outbreak", "oval", "oven", "owed", "owls", "owner", "oxidant", "oxygen", "oyster", "ozone", "pact", "paddles", "pager", "pairing", "palace", "pamphlet", "pancakes", "paper", "paradise", "pastry", "patio", "pause", "pavements", "pawnshop", "payment", "peaches", "pebbles", "peculiar", "pedantic", "peeled", "pegs", "pelican", "pencil", "people", "pepper", 
"perfect", "pests", "petals", "phase", "pheasants", "phone", "phrases", "physics", "piano", "picked", "pierce", "pigment", "piloted", "pimple", "pinched", "pioneer", "pipeline", "pirate", "pistons", "pitched", "pivot", "pixels", "pizza", "playful", "pledge", "pliers", "plotting", "plus", "plywood", "poaching", "pockets", "podcast", "poetry", "point", "poker", "polar", "ponies", "pool", "popular", "portents", "possible", "potato", "pouch", "poverty", "powder", "pram", "present", "pride", "problems", "pruned", "prying", "psychic", "public", "puck", "puddle", "puffin", "pulp", "pumpkins", "punch", "puppy", "purged", "push", "putty", "puzzled", "pylons", "pyramid", "python", "queen", "quick", "quote", "rabbits", "racetrack", "radar", "rafts", "rage", "railway", "raking", "rally", "ramped", "randomly", "rapid", "rarest", "rash", "rated", "ravine", "rays", "razor", "react", "rebel", "recipe", "reduce", "reef", "refer", "regular", "reheat", "reinvest", "rejoices", "rekindle", "relic", "remedy", "renting", "reorder", "repent", "request", "reruns", "rest", "return", "reunion", "revamp", "rewind", "rhino", "rhythm", "ribbon", "richly", "ridges", "rift", "rigid", "rims", "ringing", "riots", "ripped", "rising", "ritual", "river", "roared", "robot", "rockets", "rodent", "rogue", "roles", "romance", "roomy", "roped", "roster", "rotate", "rounded", "rover", "rowboat", "royal", "ruby", "rudely", "ruffled", "rugged", "ruined", "ruling", "rumble", "runway", "rural", "rustled", "ruthless", "sabotage", "sack", "sadness", "safety", "saga", "sailor", "sake", "salads", "sample", "sanity", "sapling", "sarcasm", "sash", "satin", "saucepan", "saved", "sawmill", "saxophone", "sayings", "scamper", "scenic", "school", "science", "scoop", "scrub", "scuba", "seasons", "second", "sedan", "seeded", "segments", "seismic", "selfish", "semifinal", "sensible", "september", "sequence", "serving", "session", "setup", "seventh", "sewage", "shackles", "shelter", "shipped", "shocking", "shrugged", "shuffled", "shyness", "siblings", "sickness", "sidekick", "sieve", "sifting", "sighting", "silk", "simplest", "sincerely", "sipped", "siren", "situated", "sixteen", "sizes", "skater", "skew", "skirting", "skulls", "skydive", "slackens", "sleepless", "slid", "slower", "slug", "smash", "smelting", "smidgen", "smog", "smuggled", "snake", "sneeze", "sniff", "snout", "snug", "soapy", "sober", "soccer", "soda", "software", "soggy", "soil", "solved", "somewhere", "sonic", "soothe", "soprano", "sorry", "southern", "sovereign", "sowed", "soya", "space", "speedy", "sphere", "spiders", "splendid", "spout", "sprig", "spud", "spying", "square", "stacking", "stellar", "stick", "stockpile", "strained", "stunning", "stylishly", "subtly", "succeed", "suddenly", "suede", "suffice", "sugar", "suitcase", "sulking", "summon", "sunken", "superior", "surfer", "sushi", "suture", "swagger", "swept", "swiftly", "sword", "swung", "syllabus", "symptoms", "syndrome", "syringe", "system", "taboo", "tacit", "tadpoles", "tagged", "tail", "taken", "talent", "tamper", "tanks", "tapestry", "tarnished", "tasked", "tattoo", "taunts", "tavern", "tawny", "taxi", "teardrop", "technical", "tedious", "teeming", "tell", "template", "tender", "tepid", "tequila", "terminal", "testing", "tether", "textbook", "thaw", "theatrics", "thirsty", "thorn", "threaten", "thumbs", "thwart", "ticket", "tidy", "tiers", "tiger", "tilt", "timber", "tinted", "tipsy", "tirade", "tissue", "titans", "toaster", "tobacco", "today", "toenail", "toffee", "together", "toilet", "token", "tolerant", 
"tomorrow", "tonic", "toolbox", "topic", "torch", "tossed", "total", "touchy", "towel", "toxic", "toyed", "trash", "trendy", "tribal", "trolling", "truth", "trying", "tsunami", "tubes", "tucks", "tudor", "tuesday", "tufts", "tugs", "tuition", "tulips", "tumbling", "tunnel", "turnip", "tusks", "tutor", "tuxedo", "twang", "tweezers", "twice", "twofold", "tycoon", "typist", "tyrant", "ugly", "ulcers", "ultimate", "umbrella", "umpire", "unafraid", "unbending", "uncle", "under", "uneven", "unfit", "ungainly", "unhappy", "union", "unjustly", "unknown", "unlikely", "unmask", "unnoticed", "unopened", "unplugs", "unquoted", "unrest", "unsafe", "until", "unusual", "unveil", "unwind", "unzip", "upbeat", "upcoming", "update", "upgrade", "uphill", "upkeep", "upload", "upon", "upper", "upright", "upstairs", "uptight", "upwards", "urban", "urchins", "urgent", "usage", "useful", "usher", "using", "usual", "utensils", "utility", "utmost", "utopia", "uttered", "vacation", "vague", "vain", "value", "vampire", "vane", "vapidly", "vary", "vastness", "vats", "vaults", "vector", "veered", "vegan", "vehicle", "vein", "velvet", "venomous", "verification", "vessel", "veteran", "vexed", "vials", "vibrate", "victim", "video", "viewpoint", "vigilant", "viking", "village", "vinegar", "violin", "vipers", "virtual", "visited", "vitals", "vivid", "vixen", "vocal", "vogue", "voice", "volcano", "vortex", "voted", "voucher", "vowels", "voyage", "vulture", "wade", "waffle", "wagtail", "waist", "waking", "wallets", "wanted", "warped", "washing", "water", "waveform", "waxing", "wayside", "weavers", "website", "wedge", "weekday", "weird", "welders", "went", "wept", "were", "western", "wetsuit", "whale", "when", "whipped", "whole", "wickets", "width", "wield", "wife", "wiggle", "wildly", "winter", "wipeout", "wiring", "wise", "withdrawn", "wives", "wizard", "wobbly", "woes", "woken", "wolf", "womanly", "wonders", "woozy", "worry", "wounded", "woven", "wrap", "wrist", "wrong", "yacht", "yahoo", "yanks", "yard", "yawning", "yearbook", "yellow", "yesterday", "yeti", "yields", "yodel", "yoga", "younger", "yoyo", "zapped", "zeal", "zebra", "zero", "zesty", "zigzags", "zinger", "zippers", "zodiac", "zombie", "zones", "zoom", } ) entropy-mnemonics-master/english_test.go0000644000175000017500000000430413062365143017504 0ustar freefreepackage mnemonics import ( "bytes" "crypto/rand" "testing" "unicode/utf8" "golang.org/x/text/unicode/norm" ) // TestEnglishDictionary checks that the english dictionary is well formed. func TestEnglishDictionary(t *testing.T) { // Check for sane constants. if English != "english" { t.Error("unexpected identifier for english dictionary") } if EnglishUniquePrefixLen != 3 { t.Error("unexpected prefix len for english dictionary") } // Check that the dictionary has well formed elements, and no repeats. engMap := make(map[string]struct{}) for _, word := range englishDictionary { // Check that the word is long enough. if utf8.RuneCountInString(word) < EnglishUniquePrefixLen { t.Fatal("found a short word:", word) } // Check that the word is normalized. newWord := norm.NFC.String(word) if newWord != word { t.Error("found a non-normalized word:", word) } // Fetch the prefix, composed of the first EnglishUniquePrefixLen // runes. var prefix []byte var runeCount int for _, r := range word { encR := make([]byte, utf8.RuneLen(r)) utf8.EncodeRune(encR, r) prefix = append(prefix, encR...) runeCount++ if runeCount == EnglishUniquePrefixLen { break } } // Check that the prefix is unique. 
str := string(prefix) _, exists := engMap[str] if exists { t.Error("found a prefix conflict:", word) } engMap[str] = struct{}{} } // Do some conversions with the english dictionary. for i := 1; i <= 32; i++ { for j := 0; j < 5; j++ { entropy := make([]byte, i) _, err := rand.Read(entropy) if err != nil { t.Fatal(err) } phrase, err := ToPhrase(entropy, English) if err != nil { t.Fatal(err) } check, err := FromPhrase(phrase, English) if err != nil { t.Fatal(err) } if bytes.Compare(entropy, check) != 0 { t.Error("conversion check failed for the english dictionary") } } } // Check that words in a phrase can be altered according to the prefix // rule. entropy := []byte{1, 2, 3, 4} phrase := Phrase{"chladsf", "syr", "afiezzz"} check, err := FromPhrase(phrase, English) if err != nil { t.Fatal(err) } if bytes.Compare(entropy, check) != 0 { t.Error("phrase substitution failed") } } entropy-mnemonics-master/german.go0000644000175000017500000005415213062365143016273 0ustar freefreepackage mnemonics // The german wordlist was pulled from the Monero project, license included // below. // Word list originally created by Shrikez // // Copyright (c) 2014-2015, The Monero Project // // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, are // permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of // conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright notice, this list // of conditions and the following disclaimer in the documentation and/or other // materials provided with the distribution. // // 3. Neither the name of the copyright holder nor the names of its contributors may be // used to endorse or promote products derived from this software without specific // prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF // MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL // THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF // THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. const ( // German is an id pointing to the standard password dictionary for german // users. German DictionaryID = "german" // GermanUniquePrefixLen indicates the number of letters needed to achieve // unique prefixes throughout the german dictionary. No two words in the // dictionary have the same prefix of len GermanUniquePrefixLen. 
GermanUniquePrefixLen = 4 ) var ( germanDictionary = Dictionary{ "Abakus", "Abart", "abbilden", "Abbruch", "Abdrift", "Abendrot", "Abfahrt", "abfeuern", "Abflug", "abfragen", "Abglanz", "abhärten", "abheben", "Abhilfe", "Abitur", "Abkehr", "Ablauf", "ablecken", "Ablösung", "Abnehmer", "abnutzen", "Abonnent", "Abrasion", "Abrede", "abrüsten", "Absicht", "Absprung", "Abstand", "absuchen", "Abteil", "Abundanz", "abwarten", "Abwurf", "Abzug", "Achse", "Achtung", "Acker", "Aderlass", "Adler", "Admiral", "Adresse", "Affe", "Affront", "Afrika", "Aggregat", "Agilität", "ähneln", "Ahnung", "Ahorn", "Akazie", "Akkord", "Akrobat", "Aktfoto", "Aktivist", "Albatros", "Alchimie", "Alemanne", "Alibi", "Alkohol", "Allee", "Allüre", "Almosen", "Almweide", "Aloe", "Alpaka", "Alpental", "Alphabet", "Alpinist", "Alraune", "Altbier", "Alter", "Altflöte", "Altruist", "Alublech", "Aludose", "Amateur", "Amazonas", "Ameise", "Amnesie", "Amok", "Ampel", "Amphibie", "Ampulle", "Amsel", "Amulett", "Anakonda", "Analogie", "Ananas", "Anarchie", "Anatomie", "Anbau", "Anbeginn", "anbieten", "Anblick", "ändern", "andocken", "Andrang", "anecken", "Anflug", "Anfrage", "Anführer", "Angebot", "Angler", "Anhalter", "Anhöhe", "Animator", "Anis", "Anker", "ankleben", "Ankunft", "Anlage", "anlocken", "Anmut", "Annahme", "Anomalie", "Anonymus", "Anorak", "anpeilen", "Anrecht", "Anruf", "Ansage", "Anschein", "Ansicht", "Ansporn", "Anteil", "Antlitz", "Antrag", "Antwort", "Anwohner", "Aorta", "Apfel", "Appetit", "Applaus", "Aquarium", "Arbeit", "Arche", "Argument", "Arktis", "Armband", "Aroma", "Asche", "Askese", "Asphalt", "Asteroid", "Ästhetik", "Astronom", "Atelier", "Athlet", "Atlantik", "Atmung", "Audienz", "aufatmen", "Auffahrt", "aufholen", "aufregen", "Aufsatz", "Auftritt", "Aufwand", "Augapfel", "Auktion", "Ausbruch", "Ausflug", "Ausgabe", "Aushilfe", "Ausland", "Ausnahme", "Aussage", "Autobahn", "Avocado", "Axthieb", "Bach", "backen", "Badesee", "Bahnhof", "Balance", "Balkon", "Ballett", "Balsam", "Banane", "Bandage", "Bankett", "Barbar", "Barde", "Barett", "Bargeld", "Barkasse", "Barriere", "Bart", "Bass", "Bastler", "Batterie", "Bauch", "Bauer", "Bauholz", "Baujahr", "Baum", "Baustahl", "Bauteil", "Bauweise", "Bazar", "beachten", "Beatmung", "beben", "Becher", "Becken", "bedanken", "beeilen", "beenden", "Beere", "befinden", "Befreier", "Begabung", "Begierde", "begrüßen", "Beiboot", "Beichte", "Beifall", "Beigabe", "Beil", "Beispiel", "Beitrag", "beizen", "bekommen", "beladen", "Beleg", "bellen", "belohnen", "Bemalung", "Bengel", "Benutzer", "Benzin", "beraten", "Bereich", "Bergluft", "Bericht", "Bescheid", "Besitz", "besorgen", "Bestand", "Besuch", "betanken", "beten", "betören", "Bett", "Beule", "Beute", "Bewegung", "bewirken", "Bewohner", "bezahlen", "Bezug", "biegen", "Biene", "Bierzelt", "bieten", "Bikini", "Bildung", "Billard", "binden", "Biobauer", "Biologe", "Bionik", "Biotop", "Birke", "Bison", "Bitte", "Biwak", "Bizeps", "blasen", "Blatt", "Blauwal", "Blende", "Blick", "Blitz", "Blockade", "Blödelei", "Blondine", "Blues", "Blume", "Blut", "Bodensee", "Bogen", "Boje", "Bollwerk", "Bonbon", "Bonus", "Boot", "Bordarzt", "Börse", "Böschung", "Boudoir", "Boxkampf", "Boykott", "Brahms", "Brandung", "Brauerei", "Brecher", "Breitaxt", "Bremse", "brennen", "Brett", "Brief", "Brigade", "Brillanz", "bringen", "brodeln", "Brosche", "Brötchen", "Brücke", "Brunnen", "Brüste", "Brutofen", "Buch", "Büffel", "Bugwelle", "Bühne", "Buletten", "Bullauge", "Bumerang", "bummeln", "Buntglas", "Bürde", "Burgherr", "Bursche", "Busen", 
"Buslinie", "Bussard", "Butangas", "Butter", "Cabrio", "campen", "Captain", "Cartoon", "Cello", "Chalet", "Charisma", "Chefarzt", "Chiffon", "Chipsatz", "Chirurg", "Chor", "Chronik", "Chuzpe", "Clubhaus", "Cockpit", "Codewort", "Cognac", "Coladose", "Computer", "Coupon", "Cousin", "Cracking", "Crash", "Curry", "Dach", "Dackel", "daddeln", "daliegen", "Dame", "Dammbau", "Dämon", "Dampflok", "Dank", "Darm", "Datei", "Datsche", "Datteln", "Datum", "Dauer", "Daunen", "Deckel", "Decoder", "Defekt", "Degen", "Dehnung", "Deiche", "Dekade", "Dekor", "Delfin", "Demut", "denken", "Deponie", "Design", "Desktop", "Dessert", "Detail", "Detektiv", "Dezibel", "Diadem", "Diagnose", "Dialekt", "Diamant", "Dichter", "Dickicht", "Diesel", "Diktat", "Diplom", "Direktor", "Dirne", "Diskurs", "Distanz", "Docht", "Dohle", "Dolch", "Domäne", "Donner", "Dorade", "Dorf", "Dörrobst", "Dorsch", "Dossier", "Dozent", "Drachen", "Draht", "Drama", "Drang", "Drehbuch", "Dreieck", "Dressur", "Drittel", "Drossel", "Druck", "Duell", "Duft", "Düne", "Dünung", "dürfen", "Duschbad", "Düsenjet", "Dynamik", "Ebbe", "Echolot", "Echse", "Eckball", "Edding", "Edelweiß", "Eden", "Edition", "Efeu", "Effekte", "Egoismus", "Ehre", "Eiablage", "Eiche", "Eidechse", "Eidotter", "Eierkopf", "Eigelb", "Eiland", "Eilbote", "Eimer", "einatmen", "Einband", "Eindruck", "Einfall", "Eingang", "Einkauf", "einladen", "Einöde", "Einrad", "Eintopf", "Einwurf", "Einzug", "Eisbär", "Eisen", "Eishöhle", "Eismeer", "Eiweiß", "Ekstase", "Elan", "Elch", "Elefant", "Eleganz", "Element", "Elfe", "Elite", "Elixier", "Ellbogen", "Eloquenz", "Emigrant", "Emission", "Emotion", "Empathie", "Empfang", "Endzeit", "Energie", "Engpass", "Enkel", "Enklave", "Ente", "entheben", "Entität", "entladen", "Entwurf", "Episode", "Epoche", "erachten", "Erbauer", "erblühen", "Erdbeere", "Erde", "Erdgas", "Erdkunde", "Erdnuss", "Erdöl", "Erdteil", "Ereignis", "Eremit", "erfahren", "Erfolg", "erfreuen", "erfüllen", "Ergebnis", "erhitzen", "erkalten", "erkennen", "erleben", "Erlösung", "ernähren", "erneuern", "Ernte", "Eroberer", "eröffnen", "Erosion", "Erotik", "Erpel", "erraten", "Erreger", "erröten", "Ersatz", "Erstflug", "Ertrag", "Eruption", "erwarten", "erwidern", "Erzbau", "Erzeuger", "erziehen", "Esel", "Eskimo", "Eskorte", "Espe", "Espresso", "essen", "Etage", "Etappe", "Etat", "Ethik", "Etikett", "Etüde", "Eule", "Euphorie", "Europa", "Everest", "Examen", "Exil", "Exodus", "Extrakt", "Fabel", "Fabrik", "Fachmann", "Fackel", "Faden", "Fagott", "Fahne", "Faible", "Fairness", "Fakt", "Fakultät", "Falke", "Fallobst", "Fälscher", "Faltboot", "Familie", "Fanclub", "Fanfare", "Fangarm", "Fantasie", "Farbe", "Farmhaus", "Farn", "Fasan", "Faser", "Fassung", "fasten", "Faulheit", "Fauna", "Faust", "Favorit", "Faxgerät", "Fazit", "fechten", "Federboa", "Fehler", "Feier", "Feige", "feilen", "Feinripp", "Feldbett", "Felge", "Fellpony", "Felswand", "Ferien", "Ferkel", "Fernweh", "Ferse", "Fest", "Fettnapf", "Feuer", "Fiasko", "Fichte", "Fiktion", "Film", "Filter", "Filz", "Finanzen", "Findling", "Finger", "Fink", "Finnwal", "Fisch", "Fitness", "Fixpunkt", "Fixstern", "Fjord", "Flachbau", "Flagge", "Flamenco", "Flanke", "Flasche", "Flaute", "Fleck", "Flegel", "flehen", "Fleisch", "fliegen", "Flinte", "Flirt", "Flocke", "Floh", "Floskel", "Floß", "Flöte", "Flugzeug", "Flunder", "Flusstal", "Flutung", "Fockmast", "Fohlen", "Föhnlage", "Fokus", "folgen", "Foliant", "Folklore", "Fontäne", "Förde", "Forelle", "Format", "Forscher", "Fortgang", "Forum", "Fotograf", "Frachter", "Fragment", 
"Fraktion", "fräsen", "Frauenpo", "Freak", "Fregatte", "Freiheit", "Freude", "Frieden", "Frohsinn", "Frosch", "Frucht", "Frühjahr", "Fuchs", "Fügung", "fühlen", "Füller", "Fundbüro", "Funkboje", "Funzel", "Furnier", "Fürsorge", "Fusel", "Fußbad", "Futteral", "Gabelung", "gackern", "Gage", "gähnen", "Galaxie", "Galeere", "Galopp", "Gameboy", "Gamsbart", "Gandhi", "Gang", "Garage", "Gardine", "Garküche", "Garten", "Gasthaus", "Gattung", "gaukeln", "Gazelle", "Gebäck", "Gebirge", "Gebräu", "Geburt", "Gedanke", "Gedeck", "Gedicht", "Gefahr", "Gefieder", "Geflügel", "Gefühl", "Gegend", "Gehirn", "Gehöft", "Gehweg", "Geige", "Geist", "Gelage", "Geld", "Gelenk", "Gelübde", "Gemälde", "Gemeinde", "Gemüse", "genesen", "Genuss", "Gepäck", "Geranie", "Gericht", "Germane", "Geruch", "Gesang", "Geschenk", "Gesetz", "Gesindel", "Gesöff", "Gespan", "Gestade", "Gesuch", "Getier", "Getränk", "Getümmel", "Gewand", "Geweih", "Gewitter", "Gewölbe", "Geysir", "Giftzahn", "Gipfel", "Giraffe", "Gitarre", "glänzen", "Glasauge", "Glatze", "Gleis", "Globus", "Glück", "glühen", "Glutofen", "Goldzahn", "Gondel", "gönnen", "Gottheit", "graben", "Grafik", "Grashalm", "Graugans", "greifen", "Grenze", "grillen", "Groschen", "Grotte", "Grube", "Grünalge", "Gruppe", "gruseln", "Gulasch", "Gummibär", "Gurgel", "Gürtel", "Güterzug", "Haarband", "Habicht", "hacken", "hadern", "Hafen", "Hagel", "Hähnchen", "Haifisch", "Haken", "Halbaffe", "Halsader", "halten", "Halunke", "Handbuch", "Hanf", "Harfe", "Harnisch", "härten", "Harz", "Hasenohr", "Haube", "hauchen", "Haupt", "Haut", "Havarie", "Hebamme", "hecheln", "Heck", "Hedonist", "Heiler", "Heimat", "Heizung", "Hektik", "Held", "helfen", "Helium", "Hemd", "hemmen", "Hengst", "Herd", "Hering", "Herkunft", "Hermelin", "Herrchen", "Herzdame", "Heulboje", "Hexe", "Hilfe", "Himbeere", "Himmel", "Hingabe", "hinhören", "Hinweis", "Hirsch", "Hirte", "Hitzkopf", "Hobel", "Hochform", "Hocker", "hoffen", "Hofhund", "Hofnarr", "Höhenzug", "Hohlraum", "Hölle", "Holzboot", "Honig", "Honorar", "horchen", "Hörprobe", "Höschen", "Hotel", "Hubraum", "Hufeisen", "Hügel", "huldigen", "Hülle", "Humbug", "Hummer", "Humor", "Hund", "Hunger", "Hupe", "Hürde", "Hurrikan", "Hydrant", "Hypnose", "Ibis", "Idee", "Idiot", "Igel", "Illusion", "Imitat", "impfen", "Import", "Inferno", "Ingwer", "Inhalte", "Inland", "Insekt", "Ironie", "Irrfahrt", "Irrtum", "Isolator", "Istwert", "Jacke", "Jade", "Jagdhund", "Jäger", "Jaguar", "Jahr", "Jähzorn", "Jazzfest", "Jetpilot", "jobben", "Jochbein", "jodeln", "Jodsalz", "Jolle", "Journal", "Jubel", "Junge", "Junimond", "Jupiter", "Jutesack", "Juwel", "Kabarett", "Kabine", "Kabuff", "Käfer", "Kaffee", "Kahlkopf", "Kaimauer", "Kajüte", "Kaktus", "Kaliber", "Kaltluft", "Kamel", "kämmen", "Kampagne", "Kanal", "Känguru", "Kanister", "Kanone", "Kante", "Kanu", "kapern", "Kapitän", "Kapuze", "Karneval", "Karotte", "Käsebrot", "Kasper", "Kastanie", "Katalog", "Kathode", "Katze", "kaufen", "Kaugummi", "Kauz", "Kehle", "Keilerei", "Keksdose", "Kellner", "Keramik", "Kerze", "Kessel", "Kette", "keuchen", "kichern", "Kielboot", "Kindheit", "Kinnbart", "Kinosaal", "Kiosk", "Kissen", "Klammer", "Klang", "Klapprad", "Klartext", "kleben", "Klee", "Kleinod", "Klima", "Klingel", "Klippe", "Klischee", "Kloster", "Klugheit", "Klüngel", "kneten", "Knie", "Knöchel", "knüpfen", "Kobold", "Kochbuch", "Kohlrabi", "Koje", "Kokosöl", "Kolibri", "Kolumne", "Kombüse", "Komiker", "kommen", "Konto", "Konzept", "Kopfkino", "Kordhose", "Korken", "Korsett", "Kosename", "Krabbe", "Krach", "Kraft", 
"Krähe", "Kralle", "Krapfen", "Krater", "kraulen", "Kreuz", "Krokodil", "Kröte", "Kugel", "Kuhhirt", "Kühnheit", "Künstler", "Kurort", "Kurve", "Kurzfilm", "kuscheln", "küssen", "Kutter", "Labor", "lachen", "Lackaffe", "Ladeluke", "Lagune", "Laib", "Lakritze", "Lammfell", "Land", "Langmut", "Lappalie", "Last", "Laterne", "Latzhose", "Laubsäge", "laufen", "Laune", "Lausbub", "Lavasee", "Leben", "Leder", "Leerlauf", "Lehm", "Lehrer", "leihen", "Lektüre", "Lenker", "Lerche", "Leseecke", "Leuchter", "Lexikon", "Libelle", "Libido", "Licht", "Liebe", "liefern", "Liftboy", "Limonade", "Lineal", "Linoleum", "List", "Liveband", "Lobrede", "locken", "Löffel", "Logbuch", "Logik", "Lohn", "Loipe", "Lokal", "Lorbeer", "Lösung", "löten", "Lottofee", "Löwe", "Luchs", "Luder", "Luftpost", "Luke", "Lümmel", "Lunge", "lutschen", "Luxus", "Macht", "Magazin", "Magier", "Magnet", "mähen", "Mahlzeit", "Mahnmal", "Maibaum", "Maisbrei", "Makel", "malen", "Mammut", "Maniküre", "Mantel", "Marathon", "Marder", "Marine", "Marke", "Marmor", "Märzluft", "Maske", "Maßanzug", "Maßkrug", "Mastkorb", "Material", "Matratze", "Mauerbau", "Maulkorb", "Mäuschen", "Mäzen", "Medium", "Meinung", "melden", "Melodie", "Mensch", "Merkmal", "Messe", "Metall", "Meteor", "Methode", "Metzger", "Mieze", "Milchkuh", "Mimose", "Minirock", "Minute", "mischen", "Missetat", "mitgehen", "Mittag", "Mixtape", "Möbel", "Modul", "mögen", "Möhre", "Molch", "Moment", "Monat", "Mondflug", "Monitor", "Monokini", "Monster", "Monument", "Moorhuhn", "Moos", "Möpse", "Moral", "Mörtel", "Motiv", "Motorrad", "Möwe", "Mühe", "Mulatte", "Müller", "Mumie", "Mund", "Münze", "Muschel", "Muster", "Mythos", "Nabel", "Nachtzug", "Nackedei", "Nagel", "Nähe", "Nähnadel", "Namen", "Narbe", "Narwal", "Nasenbär", "Natur", "Nebel", "necken", "Neffe", "Neigung", "Nektar", "Nenner", "Neptun", "Nerz", "Nessel", "Nestbau", "Netz", "Neubau", "Neuerung", "Neugier", "nicken", "Niere", "Nilpferd", "nisten", "Nocke", "Nomade", "Nordmeer", "Notdurft", "Notstand", "Notwehr", "Nudismus", "Nuss", "Nutzhanf", "Oase", "Obdach", "Oberarzt", "Objekt", "Oboe", "Obsthain", "Ochse", "Odyssee", "Ofenholz", "öffnen", "Ohnmacht", "Ohrfeige", "Ohrwurm", "Ökologie", "Oktave", "Ölberg", "Olive", "Ölkrise", "Omelett", "Onkel", "Oper", "Optiker", "Orange", "Orchidee", "ordnen", "Orgasmus", "Orkan", "Ortskern", "Ortung", "Ostasien", "Ozean", "Paarlauf", "Packeis", "paddeln", "Paket", "Palast", "Pandabär", "Panik", "Panorama", "Panther", "Papagei", "Papier", "Paprika", "Paradies", "Parka", "Parodie", "Partner", "Passant", "Patent", "Patzer", "Pause", "Pavian", "Pedal", "Pegel", "peilen", "Perle", "Person", "Pfad", "Pfau", "Pferd", "Pfleger", "Physik", "Pier", "Pilotwal", "Pinzette", "Piste", "Plakat", "Plankton", "Platin", "Plombe", "plündern", "Pobacke", "Pokal", "polieren", "Popmusik", "Porträt", "Posaune", "Postamt", "Pottwal", "Pracht", "Pranke", "Preis", "Primat", "Prinzip", "Protest", "Proviant", "Prüfung", "Pubertät", "Pudding", "Pullover", "Pulsader", "Punkt", "Pute", "Putsch", "Puzzle", "Python", "quaken", "Qualle", "Quark", "Quellsee", "Querkopf", "Quitte", "Quote", "Rabauke", "Rache", "Radclub", "Radhose", "Radio", "Radtour", "Rahmen", "Rampe", "Randlage", "Ranzen", "Rapsöl", "Raserei", "rasten", "Rasur", "Rätsel", "Raubtier", "Raumzeit", "Rausch", "Reaktor", "Realität", "Rebell", "Rede", "Reetdach", "Regatta", "Regen", "Rehkitz", "Reifen", "Reim", "Reise", "Reizung", "Rekord", "Relevanz", "Rennboot", "Respekt", "Restmüll", "retten", "Reue", "Revolte", "Rhetorik", "Rhythmus", "Richtung", 
"Riegel", "Rindvieh", "Rippchen", "Ritter", "Robbe", "Roboter", "Rockband", "Rohdaten", "Roller", "Roman", "röntgen", "Rose", "Rosskur", "Rost", "Rotahorn", "Rotglut", "Rotznase", "Rubrik", "Rückweg", "Rufmord", "Ruhe", "Ruine", "Rumpf", "Runde", "Rüstung", "rütteln", "Saaltür", "Saatguts", "Säbel", "Sachbuch", "Sack", "Saft", "sagen", "Sahneeis", "Salat", "Salbe", "Salz", "Sammlung", "Samt", "Sandbank", "Sanftmut", "Sardine", "Satire", "Sattel", "Satzbau", "Sauerei", "Saum", "Säure", "Schall", "Scheitel", "Schiff", "Schlager", "Schmied", "Schnee", "Scholle", "Schrank", "Schulbus", "Schwan", "Seeadler", "Seefahrt", "Seehund", "Seeufer", "segeln", "Sehnerv", "Seide", "Seilzug", "Senf", "Sessel", "Seufzer", "Sexgott", "Sichtung", "Signal", "Silber", "singen", "Sinn", "Sirup", "Sitzbank", "Skandal", "Skikurs", "Skipper", "Skizze", "Smaragd", "Socke", "Sohn", "Sommer", "Songtext", "Sorte", "Spagat", "Spannung", "Spargel", "Specht", "Speiseöl", "Spiegel", "Sport", "spülen", "Stadtbus", "Stall", "Stärke", "Stativ", "staunen", "Stern", "Stiftung", "Stollen", "Strömung", "Sturm", "Substanz", "Südalpen", "Sumpf", "surfen", "Tabak", "Tafel", "Tagebau", "takeln", "Taktung", "Talsohle", "Tand", "Tanzbär", "Tapir", "Tarantel", "Tarnname", "Tasse", "Tatnacht", "Tatsache", "Tatze", "Taube", "tauchen", "Taufpate", "Taumel", "Teelicht", "Teich", "teilen", "Tempo", "Tenor", "Terrasse", "Testflug", "Theater", "Thermik", "ticken", "Tiefflug", "Tierart", "Tigerhai", "Tinte", "Tischler", "toben", "Toleranz", "Tölpel", "Tonband", "Topf", "Topmodel", "Torbogen", "Torlinie", "Torte", "Tourist", "Tragesel", "trampeln", "Trapez", "Traum", "treffen", "Trennung", "Treue", "Trick", "trimmen", "Trödel", "Trost", "Trumpf", "tüfteln", "Turban", "Turm", "Übermut", "Ufer", "Uhrwerk", "umarmen", "Umbau", "Umfeld", "Umgang", "Umsturz", "Unart", "Unfug", "Unimog", "Unruhe", "Unwucht", "Uranerz", "Urlaub", "Urmensch", "Utopie", "Vakuum", "Valuta", "Vandale", "Vase", "Vektor", "Ventil", "Verb", "Verdeck", "Verfall", "Vergaser", "verhexen", "Verlag", "Vers", "Vesper", "Vieh", "Viereck", "Vinyl", "Virus", "Vitrine", "Vollblut", "Vorbote", "Vorrat", "Vorsicht", "Vulkan", "Wachstum", "Wade", "Wagemut", "Wahlen", "Wahrheit", "Wald", "Walhai", "Wallach", "Walnuss", "Walzer", "wandeln", "Wanze", "wärmen", "Warnruf", "Wäsche", "Wasser", "Weberei", "wechseln", "Wegegeld", "wehren", "Weiher", "Weinglas", "Weißbier", "Weitwurf", "Welle", "Weltall", "Werkbank", "Werwolf", "Wetter", "wiehern", "Wildgans", "Wind", "Wohl", "Wohnort", "Wolf", "Wollust", "Wortlaut", "Wrack", "Wunder", "Wurfaxt", "Wurst", "Yacht", "Yeti", "Zacke", "Zahl", "zähmen", "Zahnfee", "Zäpfchen", "Zaster", "Zaumzeug", "Zebra", "zeigen", "Zeitlupe", "Zellkern", "Zeltdach", "Zensor", "Zerfall", "Zeug", "Ziege", "Zielfoto", "Zimteis", "Zobel", "Zollhund", "Zombie", "Zöpfe", "Zucht", "Zufahrt", "Zugfahrt", "Zugvogel", "Zündung", "Zweck", "Zyklop", } ) entropy-mnemonics-master/german_test.go0000644000175000017500000000426713062365143017334 0ustar freefreepackage mnemonics import ( "bytes" "crypto/rand" "testing" "unicode/utf8" "golang.org/x/text/unicode/norm" ) // TestGermanDictionary checks that the german dictionary is well formed. func TestGermanDictionary(t *testing.T) { // Check for sane constants. if German != "german" { t.Error("unexpected identifier for german dictionary") } if GermanUniquePrefixLen != 4 { t.Error("unexpected prefix len for german dictionary") } // Check that the dictionary has well formed elements, and no repeats. 
gerMap := make(map[string]struct{}) for _, word := range germanDictionary { // Check that the word is long enough. if utf8.RuneCountInString(word) < GermanUniquePrefixLen { t.Fatal("found a short word:", word) } // Check that the word is normalized. newWord := norm.NFC.String(word) if newWord != word { t.Error("found a non-normalized word:", word) } // Fetch the prefix, composed of the first GermanUniquePrefixLen runes. var prefix []byte var runeCount int for _, r := range word { encR := make([]byte, utf8.RuneLen(r)) utf8.EncodeRune(encR, r) prefix = append(prefix, encR...) runeCount++ if runeCount == GermanUniquePrefixLen { break } } // Check that the prefix is unique. str := string(prefix) _, exists := gerMap[str] if exists { t.Error("found a prefix conflict:", word) } gerMap[str] = struct{}{} } // Do some conversions with the german dictionary. for i := 1; i <= 32; i++ { for j := 0; j < 5; j++ { entropy := make([]byte, i) _, err := rand.Read(entropy) if err != nil { t.Fatal(err) } phrase, err := ToPhrase(entropy, German) if err != nil { t.Fatal(err) } check, err := FromPhrase(phrase, German) if err != nil { t.Fatal(err) } if bytes.Compare(entropy, check) != 0 { t.Error("conversion check failed for the german dictionary") } } } // Check that words in a phrase can be altered according to the prefix // rule. entropy := []byte{1, 2, 3, 4} phrase := Phrase{"bete", "Rieglfffffzzzz", "Abundans"} check, err := FromPhrase(phrase, German) if err != nil { t.Fatal(err) } if bytes.Compare(entropy, check) != 0 { t.Error("phrase substitution failed") } } entropy-mnemonics-master/japanese.go0000644000175000017500000007205513062365143016612 0ustar freefreepackage mnemonics // The Japanese dictionary was pulled from the Monero project, license included // below. // Word list originally created by dabura667 // // Copyright (c) 2014-2015, The Monero Project // // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, are // permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of // conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright notice, this list // of conditions and the following disclaimer in the documentation and/or other // materials provided with the distribution. // // 3. Neither the name of the copyright holder nor the names of its contributors may be // used to endorse or promote products derived from this software without specific // prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF // MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL // THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF // THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. const ( // Japanese is an id pointing to the standard password dictionary for // Japanese users. 
Japanese DictionaryID = "japanese" // JapaneseUniquePrefixLen indicates the number of letters needed to // achieve unique prefixes throughout the Japanese dictionary. No two // words in the dictionary have the same prefix of len // JapaneseUniquePrefixLen. JapaneseUniquePrefixLen = 3 ) var ( japaneseDictionary = Dictionary{ "あいこくしん", "あいさつ", "あいだ", "あおぞら", "あかちゃん", "あきる", "あけがた", "あける", "あこがれる", "あさい", "あさひ", "あしあと", "あじわう", "あずかる", "あずき", "あそぶ", "あたえる", "あたためる", "あたりまえ", "あたる", "あつい", "あつかう", "あっしゅく", "あつまり", "あつめる", "あてな", "あてはまる", "あひる", "あぶら", "あぶる", "あふれる", "あまい", "あまど", "あまやかす", "あまり", "あみもの", "あめりか", "あやまる", "あゆむ", "あらいぐま", "あらし", "あらすじ", "あらためる", "あらゆる", "あらわす", "ありがとう", "あわせる", "あわてる", "あんい", "あんがい", "あんこ", "あんぜん", "あんてい", "あんない", "あんまり", "いいだす", "いおん", "いがい", "いがく", "いきおい", "いきなり", "いきもの", "いきる", "いくじ", "いくぶん", "いけばな", "いけん", "いこう", "いこく", "いこつ", "いさましい", "いさん", "いしき", "いじゅう", "いじょう", "いじわる", "いずみ", "いずれ", "いせい", "いせえび", "いせかい", "いせき", "いぜん", "いそうろう", "いそがしい", "いだい", "いだく", "いたずら", "いたみ", "いたりあ", "いちおう", "いちじ", "いちど", "いちば", "いちぶ", "いちりゅう", "いつか", "いっしゅん", "いっせい", "いっそう", "いったん", "いっち", "いってい", "いっぽう", "いてざ", "いてん", "いどう", "いとこ", "いない", "いなか", "いねむり", "いのち", "いのる", "いはつ", "いばる", "いはん", "いびき", "いひん", "いふく", "いへん", "いほう", "いみん", "いもうと", "いもたれ", "いもり", "いやがる", "いやす", "いよかん", "いよく", "いらい", "いらすと", "いりぐち", "いりょう", "いれい", "いれもの", "いれる", "いろえんぴつ", "いわい", "いわう", "いわかん", "いわば", "いわゆる", "いんげんまめ", "いんさつ", "いんしょう", "いんよう", "うえき", "うえる", "うおざ", "うがい", "うかぶ", "うかべる", "うきわ", "うくらいな", "うくれれ", "うけたまわる", "うけつけ", "うけとる", "うけもつ", "うける", "うごかす", "うごく", "うこん", "うさぎ", "うしなう", "うしろがみ", "うすい", "うすぎ", "うすぐらい", "うすめる", "うせつ", "うちあわせ", "うちがわ", "うちき", "うちゅう", "うっかり", "うつくしい", "うったえる", "うつる", "うどん", "うなぎ", "うなじ", "うなずく", "うなる", "うねる", "うのう", "うぶげ", "うぶごえ", "うまれる", "うめる", "うもう", "うやまう", "うよく", "うらがえす", "うらぐち", "うらない", "うりあげ", "うりきれ", "うるさい", "うれしい", "うれゆき", "うれる", "うろこ", "うわき", "うわさ", "うんこう", "うんちん", "うんてん", "うんどう", "えいえん", "えいが", "えいきょう", "えいご", "えいせい", "えいぶん", "えいよう", "えいわ", "えおり", "えがお", "えがく", "えきたい", "えくせる", "えしゃく", "えすて", "えつらん", "えのぐ", "えほうまき", "えほん", "えまき", "えもじ", "えもの", "えらい", "えらぶ", "えりあ", "えんえん", "えんかい", "えんぎ", "えんげき", "えんしゅう", "えんぜつ", "えんそく", "えんちょう", "えんとつ", "おいかける", "おいこす", "おいしい", "おいつく", "おうえん", "おうさま", "おうじ", "おうせつ", "おうたい", "おうふく", "おうべい", "おうよう", "おえる", "おおい", "おおう", "おおどおり", "おおや", "おおよそ", "おかえり", "おかず", "おがむ", "おかわり", "おぎなう", "おきる", "おくさま", "おくじょう", "おくりがな", "おくる", "おくれる", "おこす", "おこなう", "おこる", "おさえる", "おさない", "おさめる", "おしいれ", "おしえる", "おじぎ", "おじさん", "おしゃれ", "おそらく", "おそわる", "おたがい", "おたく", "おだやか", "おちつく", "おっと", "おつり", "おでかけ", "おとしもの", "おとなしい", "おどり", "おどろかす", "おばさん", "おまいり", "おめでとう", "おもいで", "おもう", "おもたい", "おもちゃ", "おやつ", "おやゆび", "およぼす", "おらんだ", "おろす", "おんがく", "おんけい", "おんしゃ", "おんせん", "おんだん", "おんちゅう", "おんどけい", "かあつ", "かいが", "がいき", "がいけん", "がいこう", "かいさつ", "かいしゃ", "かいすいよく", "かいぜん", "かいぞうど", "かいつう", "かいてん", "かいとう", "かいふく", "がいへき", "かいほう", "かいよう", "がいらい", "かいわ", "かえる", "かおり", "かかえる", "かがく", "かがし", "かがみ", "かくご", "かくとく", "かざる", "がぞう", "かたい", "かたち", "がちょう", "がっきゅう", "がっこう", "がっさん", "がっしょう", "かなざわし", "かのう", "がはく", "かぶか", "かほう", "かほご", "かまう", "かまぼこ", "かめれおん", "かゆい", "かようび", "からい", "かるい", "かろう", "かわく", "かわら", "がんか", "かんけい", "かんこう", "かんしゃ", "かんそう", "かんたん", "かんち", "がんばる", "きあい", "きあつ", "きいろ", "ぎいん", "きうい", "きうん", "きえる", "きおう", "きおく", "きおち", "きおん", "きかい", "きかく", "きかんしゃ", "ききて", "きくばり", "きくらげ", "きけんせい", "きこう", "きこえる", "きこく", "きさい", "きさく", "きさま", "きさらぎ", "ぎじかがく", "ぎしき", "ぎじたいけん", "ぎじにってい", "ぎじゅつしゃ", "きすう", "きせい", "きせき", "きせつ", "きそう", "きぞく", "きぞん", "きたえる", "きちょう", "きつえん", "ぎっちり", "きつつき", "きつね", "きてい", "きどう", 
"きどく", "きない", "きなが", "きなこ", "きぬごし", "きねん", "きのう", "きのした", "きはく", "きびしい", "きひん", "きふく", "きぶん", "きぼう", "きほん", "きまる", "きみつ", "きむずかしい", "きめる", "きもだめし", "きもち", "きもの", "きゃく", "きやく", "ぎゅうにく", "きよう", "きょうりゅう", "きらい", "きらく", "きりん", "きれい", "きれつ", "きろく", "ぎろん", "きわめる", "ぎんいろ", "きんかくじ", "きんじょ", "きんようび", "ぐあい", "くいず", "くうかん", "くうき", "くうぐん", "くうこう", "ぐうせい", "くうそう", "ぐうたら", "くうふく", "くうぼ", "くかん", "くきょう", "くげん", "ぐこう", "くさい", "くさき", "くさばな", "くさる", "くしゃみ", "くしょう", "くすのき", "くすりゆび", "くせげ", "くせん", "ぐたいてき", "くださる", "くたびれる", "くちこみ", "くちさき", "くつした", "ぐっすり", "くつろぐ", "くとうてん", "くどく", "くなん", "くねくね", "くのう", "くふう", "くみあわせ", "くみたてる", "くめる", "くやくしょ", "くらす", "くらべる", "くるま", "くれる", "くろう", "くわしい", "ぐんかん", "ぐんしょく", "ぐんたい", "ぐんて", "けあな", "けいかく", "けいけん", "けいこ", "けいさつ", "げいじゅつ", "けいたい", "げいのうじん", "けいれき", "けいろ", "けおとす", "けおりもの", "げきか", "げきげん", "げきだん", "げきちん", "げきとつ", "げきは", "げきやく", "げこう", "げこくじょう", "げざい", "けさき", "げざん", "けしき", "けしごむ", "けしょう", "げすと", "けたば", "けちゃっぷ", "けちらす", "けつあつ", "けつい", "けつえき", "けっこん", "けつじょ", "けっせき", "けってい", "けつまつ", "げつようび", "げつれい", "けつろん", "げどく", "けとばす", "けとる", "けなげ", "けなす", "けなみ", "けぬき", "げねつ", "けねん", "けはい", "げひん", "けぶかい", "げぼく", "けまり", "けみかる", "けむし", "けむり", "けもの", "けらい", "けろけろ", "けわしい", "けんい", "けんえつ", "けんお", "けんか", "げんき", "けんげん", "けんこう", "けんさく", "けんしゅう", "けんすう", "げんそう", "けんちく", "けんてい", "けんとう", "けんない", "けんにん", "げんぶつ", "けんま", "けんみん", "けんめい", "けんらん", "けんり", "こあくま", "こいぬ", "こいびと", "ごうい", "こうえん", "こうおん", "こうかん", "ごうきゅう", "ごうけい", "こうこう", "こうさい", "こうじ", "こうすい", "ごうせい", "こうそく", "こうたい", "こうちゃ", "こうつう", "こうてい", "こうどう", "こうない", "こうはい", "ごうほう", "ごうまん", "こうもく", "こうりつ", "こえる", "こおり", "ごかい", "ごがつ", "ごかん", "こくご", "こくさい", "こくとう", "こくない", "こくはく", "こぐま", "こけい", "こける", "ここのか", "こころ", "こさめ", "こしつ", "こすう", "こせい", "こせき", "こぜん", "こそだて", "こたい", "こたえる", "こたつ", "こちょう", "こっか", "こつこつ", "こつばん", "こつぶ", "こてい", "こてん", "ことがら", "ことし", "ことば", "ことり", "こなごな", "こねこね", "このまま", "このみ", "このよ", "ごはん", "こひつじ", "こふう", "こふん", "こぼれる", "ごまあぶら", "こまかい", "ごますり", "こまつな", "こまる", "こむぎこ", "こもじ", "こもち", "こもの", "こもん", "こやく", "こやま", "こゆう", "こゆび", "こよい", "こよう", "こりる", "これくしょん", "ころっけ", "こわもて", "こわれる", "こんいん", "こんかい", "こんき", "こんしゅう", "こんすい", "こんだて", "こんとん", "こんなん", "こんびに", "こんぽん", "こんまけ", "こんや", "こんれい", "こんわく", "ざいえき", "さいかい", "さいきん", "ざいげん", "ざいこ", "さいしょ", "さいせい", "ざいたく", "ざいちゅう", "さいてき", "ざいりょう", "さうな", "さかいし", "さがす", "さかな", "さかみち", "さがる", "さぎょう", "さくし", "さくひん", "さくら", "さこく", "さこつ", "さずかる", "ざせき", "さたん", "さつえい", "ざつおん", "ざっか", "ざつがく", "さっきょく", "ざっし", "さつじん", "ざっそう", "さつたば", "さつまいも", "さてい", "さといも", "さとう", "さとおや", "さとし", "さとる", "さのう", "さばく", "さびしい", "さべつ", "さほう", "さほど", "さます", "さみしい", "さみだれ", "さむけ", "さめる", "さやえんどう", "さゆう", "さよう", "さよく", "さらだ", "ざるそば", "さわやか", "さわる", "さんいん", "さんか", "さんきゃく", "さんこう", "さんさい", "ざんしょ", "さんすう", "さんせい", "さんそ", "さんち", "さんま", "さんみ", "さんらん", "しあい", "しあげ", "しあさって", "しあわせ", "しいく", "しいん", "しうち", "しえい", "しおけ", "しかい", "しかく", "じかん", "しごと", "しすう", "じだい", "したうけ", "したぎ", "したて", "したみ", "しちょう", "しちりん", "しっかり", "しつじ", "しつもん", "してい", "してき", "してつ", "じてん", "じどう", "しなぎれ", "しなもの", "しなん", "しねま", "しねん", "しのぐ", "しのぶ", "しはい", "しばかり", "しはつ", "しはらい", "しはん", "しひょう", "しふく", "じぶん", "しへい", "しほう", "しほん", "しまう", "しまる", "しみん", "しむける", "じむしょ", "しめい", "しめる", "しもん", "しゃいん", "しゃうん", "しゃおん", "じゃがいも", "しやくしょ", "しゃくほう", "しゃけん", "しゃこ", "しゃざい", "しゃしん", "しゃせん", "しゃそう", "しゃたい", "しゃちょう", "しゃっきん", "じゃま", "しゃりん", "しゃれい", "じゆう", "じゅうしょ", "しゅくはく", "じゅしん", "しゅっせき", "しゅみ", "しゅらば", "じゅんばん", "しょうかい", "しょくたく", "しょっけん", "しょどう", "しょもつ", "しらせる", "しらべる", "しんか", "しんこう", "じんじゃ", "しんせいじ", "しんちく", "しんりん", "すあげ", "すあし", "すあな", "ずあん", "すいえい", "すいか", "すいとう", "ずいぶん", "すいようび", "すうがく", "すうじつ", "すうせん", "すおどり", 
"すきま", "すくう", "すくない", "すける", "すごい", "すこし", "ずさん", "すずしい", "すすむ", "すすめる", "すっかり", "ずっしり", "ずっと", "すてき", "すてる", "すねる", "すのこ", "すはだ", "すばらしい", "ずひょう", "ずぶぬれ", "すぶり", "すふれ", "すべて", "すべる", "ずほう", "すぼん", "すまい", "すめし", "すもう", "すやき", "すらすら", "するめ", "すれちがう", "すろっと", "すわる", "すんぜん", "すんぽう", "せあぶら", "せいかつ", "せいげん", "せいじ", "せいよう", "せおう", "せかいかん", "せきにん", "せきむ", "せきゆ", "せきらんうん", "せけん", "せこう", "せすじ", "せたい", "せたけ", "せっかく", "せっきゃく", "ぜっく", "せっけん", "せっこつ", "せっさたくま", "せつぞく", "せつだん", "せつでん", "せっぱん", "せつび", "せつぶん", "せつめい", "せつりつ", "せなか", "せのび", "せはば", "せびろ", "せぼね", "せまい", "せまる", "せめる", "せもたれ", "せりふ", "ぜんあく", "せんい", "せんえい", "せんか", "せんきょ", "せんく", "せんげん", "ぜんご", "せんさい", "せんしゅ", "せんすい", "せんせい", "せんぞ", "せんたく", "せんちょう", "せんてい", "せんとう", "せんぬき", "せんねん", "せんぱい", "ぜんぶ", "ぜんぽう", "せんむ", "せんめんじょ", "せんもん", "せんやく", "せんゆう", "せんよう", "ぜんら", "ぜんりゃく", "せんれい", "せんろ", "そあく", "そいとげる", "そいね", "そうがんきょう", "そうき", "そうご", "そうしん", "そうだん", "そうなん", "そうび", "そうめん", "そうり", "そえもの", "そえん", "そがい", "そげき", "そこう", "そこそこ", "そざい", "そしな", "そせい", "そせん", "そそぐ", "そだてる", "そつう", "そつえん", "そっかん", "そつぎょう", "そっけつ", "そっこう", "そっせん", "そっと", "そとがわ", "そとづら", "そなえる", "そなた", "そふぼ", "そぼく", "そぼろ", "そまつ", "そまる", "そむく", "そむりえ", "そめる", "そもそも", "そよかぜ", "そらまめ", "そろう", "そんかい", "そんけい", "そんざい", "そんしつ", "そんぞく", "そんちょう", "ぞんび", "ぞんぶん", "そんみん", "たあい", "たいいん", "たいうん", "たいえき", "たいおう", "だいがく", "たいき", "たいぐう", "たいけん", "たいこ", "たいざい", "だいじょうぶ", "だいすき", "たいせつ", "たいそう", "だいたい", "たいちょう", "たいてい", "だいどころ", "たいない", "たいねつ", "たいのう", "たいはん", "だいひょう", "たいふう", "たいへん", "たいほ", "たいまつばな", "たいみんぐ", "たいむ", "たいめん", "たいやき", "たいよう", "たいら", "たいりょく", "たいる", "たいわん", "たうえ", "たえる", "たおす", "たおる", "たおれる", "たかい", "たかね", "たきび", "たくさん", "たこく", "たこやき", "たさい", "たしざん", "だじゃれ", "たすける", "たずさわる", "たそがれ", "たたかう", "たたく", "ただしい", "たたみ", "たちばな", "だっかい", "だっきゃく", "だっこ", "だっしゅつ", "だったい", "たてる", "たとえる", "たなばた", "たにん", "たぬき", "たのしみ", "たはつ", "たぶん", "たべる", "たぼう", "たまご", "たまる", "だむる", "ためいき", "ためす", "ためる", "たもつ", "たやすい", "たよる", "たらす", "たりきほんがん", "たりょう", "たりる", "たると", "たれる", "たれんと", "たろっと", "たわむれる", "だんあつ", "たんい", "たんおん", "たんか", "たんき", "たんけん", "たんご", "たんさん", "たんじょうび", "だんせい", "たんそく", "たんたい", "だんち", "たんてい", "たんとう", "だんな", "たんにん", "だんねつ", "たんのう", "たんぴん", "だんぼう", "たんまつ", "たんめい", "だんれつ", "だんろ", "だんわ", "ちあい", "ちあん", "ちいき", "ちいさい", "ちえん", "ちかい", "ちから", "ちきゅう", "ちきん", "ちけいず", "ちけん", "ちこく", "ちさい", "ちしき", "ちしりょう", "ちせい", "ちそう", "ちたい", "ちたん", "ちちおや", "ちつじょ", "ちてき", "ちてん", "ちぬき", "ちぬり", "ちのう", "ちひょう", "ちへいせん", "ちほう", "ちまた", "ちみつ", "ちみどろ", "ちめいど", "ちゃんこなべ", "ちゅうい", "ちゆりょく", "ちょうし", "ちょさくけん", "ちらし", "ちらみ", "ちりがみ", "ちりょう", "ちるど", "ちわわ", "ちんたい", "ちんもく", "ついか", "ついたち", "つうか", "つうじょう", "つうはん", "つうわ", "つかう", "つかれる", "つくね", "つくる", "つけね", "つける", "つごう", "つたえる", "つづく", "つつじ", "つつむ", "つとめる", "つながる", "つなみ", "つねづね", "つのる", "つぶす", "つまらない", "つまる", "つみき", "つめたい", "つもり", "つもる", "つよい", "つるぼ", "つるみく", "つわもの", "つわり", "てあし", "てあて", "てあみ", "ていおん", "ていか", "ていき", "ていけい", "ていこく", "ていさつ", "ていし", "ていせい", "ていたい", "ていど", "ていねい", "ていひょう", "ていへん", "ていぼう", "てうち", "ておくれ", "てきとう", "てくび", "でこぼこ", "てさぎょう", "てさげ", "てすり", "てそう", "てちがい", "てちょう", "てつがく", "てつづき", "でっぱ", "てつぼう", "てつや", "でぬかえ", "てぬき", "てぬぐい", "てのひら", "てはい", "てぶくろ", "てふだ", "てほどき", "てほん", "てまえ", "てまきずし", "てみじか", "てみやげ", "てらす", "てれび", "てわけ", "てわたし", "でんあつ", "てんいん", "てんかい", "てんき", "てんぐ", "てんけん", "てんごく", "てんさい", "てんし", "てんすう", "でんち", "てんてき", "てんとう", "てんない", "てんぷら", "てんぼうだい", "てんめつ", "てんらんかい", "でんりょく", "でんわ", "どあい", "といれ", "どうかん", "とうきゅう", "どうぐ", "とうし", "とうむぎ", "とおい", "とおか", "とおく", "とおす", "とおる", "とかい", "とかす", "ときおり", "ときどき", "とくい", "とくしゅう", "とくてん", "とくに", "とくべつ", "とけい", "とける", "とこや", "とさか", "としょかん", "とそう", "とたん", "とちゅう", 
"とっきゅう", "とっくん", "とつぜん", "とつにゅう", "とどける", "ととのえる", "とない", "となえる", "となり", "とのさま", "とばす", "どぶがわ", "とほう", "とまる", "とめる", "ともだち", "ともる", "どようび", "とらえる", "とんかつ", "どんぶり", "ないかく", "ないこう", "ないしょ", "ないす", "ないせん", "ないそう", "なおす", "ながい", "なくす", "なげる", "なこうど", "なさけ", "なたでここ", "なっとう", "なつやすみ", "ななおし", "なにごと", "なにもの", "なにわ", "なのか", "なふだ", "なまいき", "なまえ", "なまみ", "なみだ", "なめらか", "なめる", "なやむ", "ならう", "ならび", "ならぶ", "なれる", "なわとび", "なわばり", "にあう", "にいがた", "にうけ", "におい", "にかい", "にがて", "にきび", "にくしみ", "にくまん", "にげる", "にさんかたんそ", "にしき", "にせもの", "にちじょう", "にちようび", "にっか", "にっき", "にっけい", "にっこう", "にっさん", "にっしょく", "にっすう", "にっせき", "にってい", "になう", "にほん", "にまめ", "にもつ", "にやり", "にゅういん", "にりんしゃ", "にわとり", "にんい", "にんか", "にんき", "にんげん", "にんしき", "にんずう", "にんそう", "にんたい", "にんち", "にんてい", "にんにく", "にんぷ", "にんまり", "にんむ", "にんめい", "にんよう", "ぬいくぎ", "ぬかす", "ぬぐいとる", "ぬぐう", "ぬくもり", "ぬすむ", "ぬまえび", "ぬめり", "ぬらす", "ぬんちゃく", "ねあげ", "ねいき", "ねいる", "ねいろ", "ねぐせ", "ねくたい", "ねくら", "ねこぜ", "ねこむ", "ねさげ", "ねすごす", "ねそべる", "ねだん", "ねつい", "ねっしん", "ねつぞう", "ねったいぎょ", "ねぶそく", "ねふだ", "ねぼう", "ねほりはほり", "ねまき", "ねまわし", "ねみみ", "ねむい", "ねむたい", "ねもと", "ねらう", "ねわざ", "ねんいり", "ねんおし", "ねんかん", "ねんきん", "ねんぐ", "ねんざ", "ねんし", "ねんちゃく", "ねんど", "ねんぴ", "ねんぶつ", "ねんまつ", "ねんりょう", "ねんれい", "のいず", "のおづま", "のがす", "のきなみ", "のこぎり", "のこす", "のこる", "のせる", "のぞく", "のぞむ", "のたまう", "のちほど", "のっく", "のばす", "のはら", "のべる", "のぼる", "のみもの", "のやま", "のらいぬ", "のらねこ", "のりもの", "のりゆき", "のれん", "のんき", "ばあい", "はあく", "ばあさん", "ばいか", "ばいく", "はいけん", "はいご", "はいしん", "はいすい", "はいせん", "はいそう", "はいち", "ばいばい", "はいれつ", "はえる", "はおる", "はかい", "ばかり", "はかる", "はくしゅ", "はけん", "はこぶ", "はさみ", "はさん", "はしご", "ばしょ", "はしる", "はせる", "ぱそこん", "はそん", "はたん", "はちみつ", "はつおん", "はっかく", "はづき", "はっきり", "はっくつ", "はっけん", "はっこう", "はっさん", "はっしん", "はったつ", "はっちゅう", "はってん", "はっぴょう", "はっぽう", "はなす", "はなび", "はにかむ", "はぶらし", "はみがき", "はむかう", "はめつ", "はやい", "はやし", "はらう", "はろうぃん", "はわい", "はんい", "はんえい", "はんおん", "はんかく", "はんきょう", "ばんぐみ", "はんこ", "はんしゃ", "はんすう", "はんだん", "ぱんち", "ぱんつ", "はんてい", "はんとし", "はんのう", "はんぱ", "はんぶん", "はんぺん", "はんぼうき", "はんめい", "はんらん", "はんろん", "ひいき", "ひうん", "ひえる", "ひかく", "ひかり", "ひかる", "ひかん", "ひくい", "ひけつ", "ひこうき", "ひこく", "ひさい", "ひさしぶり", "ひさん", "びじゅつかん", "ひしょ", } ) entropy-mnemonics-master/japanese_test.go0000644000175000017500000000435113062365143017643 0ustar freefreepackage mnemonics import ( "bytes" "crypto/rand" "testing" "unicode/utf8" "golang.org/x/text/unicode/norm" ) // TestJapaneseDictionary checks that the japanese dictionary is // well formed. func TestJapanesesDictionary(t *testing.T) { // Check for sane constants. if Japanese != "japanese" { t.Error("unexpected identifier for japanese dictionary") } if JapaneseUniquePrefixLen != 3 { t.Error("unexpected prefix len for japanese dictionary") } // Check that the dictionary has well formed elements, and no repeats. japMap := make(map[string]struct{}) for _, word := range japaneseDictionary { // Check that the word is long enough. if utf8.RuneCountInString(word) < JapaneseUniquePrefixLen { t.Fatal("found a short word:", word) } // Check that the word is normalized. newWord := norm.NFC.String(word) if newWord != word { t.Error("found a non-normalized word:", word) } // Fetch the prefix, composed of the first JapaneseUniquePrefixLen // runes. var prefix []byte var runeCount int for _, r := range word { encR := make([]byte, utf8.RuneLen(r)) utf8.EncodeRune(encR, r) prefix = append(prefix, encR...) runeCount++ if runeCount == JapaneseUniquePrefixLen { break } } // Check that the prefix is unique. 
str := string(prefix) _, exists := japMap[str] if exists { t.Error("found a prefix conflict:", word) } japMap[str] = struct{}{} } // Do some conversions with the japanese dictionary. for i := 1; i <= 32; i++ { for j := 0; j < 5; j++ { entropy := make([]byte, i) _, err := rand.Read(entropy) if err != nil { t.Fatal(err) } phrase, err := ToPhrase(entropy, Japanese) if err != nil { t.Fatal(err) } check, err := FromPhrase(phrase, Japanese) if err != nil { t.Fatal(err) } if bytes.Compare(entropy, check) != 0 { t.Error("conversion check failed for the japanese dictionary") } } } // Check that words in a phrase can be altered according to the prefix // rule. entropy := []byte{1, 2, 3, 4} phrase := Phrase{"えんち", "としょbar", "あふれbaz"} check, err := FromPhrase(phrase, Japanese) if err != nil { t.Fatal(err) } if bytes.Compare(entropy, check) != 0 { t.Error("phrase substitution failed") } } entropy-mnemonics-master/mnemonics.go0000644000175000017500000001545413062365143017014 0ustar freefree// Package mnemonics is a package that converts []byte's into human-friendly // phrases, using common words pulled from a dictionary. The dictionary size is // 1626, and multiple languages are supported. Each dictionary supports // modified phrases. Only the first few characters of each word are important. // These characters form a unique prefix. For example, in the English // dictionary, the unique prefix len (EnglishUniquePrefixLen) is 3, which means // the word 'abbey' could be replaced with the word 'abbot', and the program // would still run as expected. // // The primary purpose of this library is creating human-friendly // cryptographically secure passwords. A cryptographically secure password // needs to contain between 128 and 256 bits of entropy. Humans are typically // incapable of generating sufficiently secure passwords without a random // number generator, and 256-bit random numbers tend to difficult to memorize // and even to write down (a single mistake in the writing, or even a single // somewhat sloppy character can render the backup useless). // // By using a small set of common words instead of random numbers, copying // errors are more easily spotted and memorization is also easier, without // sacrificing password strength. // // The mnemonics package does not have any functions for actually generating // entropy, it just converts existing entropy into human-friendly phrases. package mnemonics import ( "errors" "math/big" "strings" "unicode/utf8" "golang.org/x/text/unicode/norm" ) const ( // DictionarySize specifies the size of the dictionaries that are used by // the mnemonics package. All dictionaries are the same length so that the // same []byte can be encoded into multiple languages and all results will // resemble eachother. DictionarySize = 1626 ) var ( errEmptyInput = errors.New("input has len 0 - not valid for conversion") errUnknownDictionary = errors.New("language not recognized") errUnknownWord = errors.New("word not found in dictionary for given language") ) type ( // DictionaryID is a type-safe identifier that indicates which dictionary // should be used. DictionaryID string // Dictionary is a DictionarySize list of words which can be used to create // human-friendly entropy. Dictionary [DictionarySize]string // Phrase is the human readable version of a random []byte. Most typically, // a phrase is displayed to the user using the String method. Phrase []string ) // The conversion functions can be seen as changing the base of a number. 
A // []byte can actually be viewed as a slice of base-256 numbers, and a []dict // can be viewed as a slice of base-1626 numbers. The conversions are a little // strange because leading 0's need to be preserved. // // For example, in base 256: // // {0} -> 0 // {255} -> 255 // {0, 0} -> 256 // {1, 0} -> 257 // {0, 1} -> 512 // // Every possible []byte has a unique big.Int which represents it, and every // big.Int represents a unique []byte. // bytesToInt converts a byte slice to a big.Int in a way that preserves // leading 0s, and ensures there is a perfect 1:1 mapping between Int's and // []byte's. func bytesToInt(bs []byte) *big.Int { base := big.NewInt(256) exp := big.NewInt(1) result := big.NewInt(-1) for i := 0; i < len(bs); i++ { tmp := big.NewInt(int64(bs[i])) tmp.Add(tmp, big.NewInt(1)) tmp.Mul(tmp, exp) exp.Mul(exp, base) result.Add(result, tmp) } return result } // intToBytes conversts a big.Int to a []byte, following the conventions // documented at bytesToInt. func intToBytes(bi *big.Int) (bs []byte) { base := big.NewInt(256) for bi.Cmp(base) >= 0 { i := new(big.Int).Mod(bi, base).Int64() bs = append(bs, byte(i)) bi.Sub(bi, base) bi.Div(bi, base) } bs = append(bs, byte(bi.Int64())) return bs } // phraseToInt coverts a phrase into a big.Int, using logic similar to // bytesToInt. func phraseToInt(p Phrase, did DictionaryID) (*big.Int, error) { // Determine which dictionary to use based on the input language. var dict Dictionary var prefixLen int switch { case did == English: dict = englishDictionary prefixLen = EnglishUniquePrefixLen case did == German: dict = germanDictionary prefixLen = GermanUniquePrefixLen case did == Japanese: dict = japaneseDictionary prefixLen = JapaneseUniquePrefixLen default: return nil, errUnknownDictionary } base := big.NewInt(1626) exp := big.NewInt(1) result := big.NewInt(-1) for _, word := range p { // Normalize the input. word = norm.NFC.String(word) // Get the first prefixLen runes from the string. var prefix []byte var runeCount int for _, r := range word { encR := make([]byte, utf8.RuneLen(r)) utf8.EncodeRune(encR, r) prefix = append(prefix, encR...) runeCount++ if runeCount == prefixLen { break } } // Find the index associated with the phrase. var tmp *big.Int found := false for j, word := range dict { if strings.HasPrefix(word, string(prefix)) { tmp = big.NewInt(int64(j)) found = true break } } if !found { return nil, errUnknownWord } // Add the index to the int. tmp.Add(tmp, big.NewInt(1)) tmp.Mul(tmp, exp) exp.Mul(exp, base) result.Add(result, tmp) } return result, nil } // intToPhrase converts a phrase into a big.Int, working in a fashion similar // to bytesToInt. func intToPhrase(bi *big.Int, did DictionaryID) (p Phrase, err error) { // Determine which dictionary to use based on the input language. var dict Dictionary switch { case did == English: dict = englishDictionary case did == German: dict = germanDictionary case did == Japanese: dict = japaneseDictionary default: return nil, errUnknownDictionary } base := big.NewInt(DictionarySize) for bi.Cmp(base) >= 0 { i := new(big.Int).Mod(bi, base).Int64() p = append(p, dict[i]) bi.Sub(bi, base) bi.Div(bi, base) } p = append(p, dict[bi.Int64()]) return p, nil } // ToPhrase converts an input []byte to a human-friendly phrase. The conversion // is reversible. 
func ToPhrase(entropy []byte, did DictionaryID) (Phrase, error) { if len(entropy) == 0 { return nil, errEmptyInput } intEntropy := bytesToInt(entropy) return intToPhrase(intEntropy, did) } // FromPhrase converts an input phrase back to the original []byte. func FromPhrase(p Phrase, did DictionaryID) ([]byte, error) { if len(p) == 0 { return nil, errEmptyInput } intEntropy, err := phraseToInt(p, did) if err != nil { return nil, err } return intToBytes(intEntropy), nil } // FromString converts an input string into a phrase, and then calls // 'FromPhrase'. func FromString(str string, did DictionaryID) ([]byte, error) { phrase := Phrase(strings.Split(str, " ")) return FromPhrase(phrase, did) } // String combines a phrase into a single string by concatenating the // individual words with space separation. func (p Phrase) String() string { return strings.Join(p, " ") } entropy-mnemonics-master/mnemonics_test.go0000644000175000017500000002346113062365143020050 0ustar freefreepackage mnemonics import ( "bytes" "testing" ) // TestConversions checks ToPhrase and FromPhrase for consistency and sanity. func TestConversions(t *testing.T) { // Try for value {0}. initial := []byte{0} phrase, err := ToPhrase(initial, English) if err != nil { t.Error(err) } if len(phrase) != 1 { t.Fatal("unexpected phrase length") } if phrase[0] != englishDictionary[0] { t.Error("unexpected ToPhrase result") } final, err := FromPhrase(phrase, English) if err != nil { t.Error(err) } if bytes.Compare(initial, final) != 0 { t.Error("failure for value {0}") } // Try for value {1}. initial = []byte{1} phrase, err = ToPhrase(initial, English) if err != nil { t.Error(err) } if len(phrase) != 1 { t.Fatal("unexpected phrase length") } if phrase[0] != englishDictionary[1] { t.Error("unexpected ToPhrase result") } final, err = FromPhrase(phrase, English) if err != nil { t.Error(err) } if bytes.Compare(initial, final) != 0 { t.Error("failure for value {1}") } // Try for value {255}. initial = []byte{255} phrase, err = ToPhrase(initial, English) if err != nil { t.Error(err) } if len(phrase) != 1 { t.Fatal("unexpected phrase length") } if phrase[0] != englishDictionary[255] { t.Error("unexpected ToPhrase result") } final, err = FromPhrase(phrase, English) if err != nil { t.Error(err) } if bytes.Compare(initial, final) != 0 { t.Error("failure for value {255}") } // Try for value {0, 0}. initial = []byte{0, 0} phrase, err = ToPhrase(initial, English) if err != nil { t.Error(err) } if len(phrase) != 1 { t.Fatal("unexpected phrase length") } if phrase[0] != englishDictionary[256] { t.Error("unexpected ToPhrase result") } final, err = FromPhrase(phrase, English) if err != nil { t.Error(err) } if bytes.Compare(initial, final) != 0 { t.Error("failure for value {0, 0}") } // Try for value {1, 0}. initial = []byte{1, 0} phrase, err = ToPhrase(initial, English) if err != nil { t.Error(err) } if len(phrase) != 1 { t.Fatal("unexpected phrase length") } if phrase[0] != englishDictionary[257] { t.Error("unexpected ToPhrase result") } final, err = FromPhrase(phrase, English) if err != nil { t.Error(err) } if bytes.Compare(initial, final) != 0 { t.Error("failure for value {1, 0}") } // Try for value {0, 1}. 
initial = []byte{0, 1} phrase, err = ToPhrase(initial, English) if err != nil { t.Error(err) } if len(phrase) != 1 { t.Fatal("unexpected phrase length") } if phrase[0] != englishDictionary[512] { t.Error("unexpected ToPhrase result") } final, err = FromPhrase(phrase, English) if err != nil { t.Error(err) } if bytes.Compare(initial, final) != 0 { t.Error("failure for value {0, 1}") } // Try for value {1, 1}. initial = []byte{1, 1} phrase, err = ToPhrase(initial, English) if err != nil { t.Error(err) } if len(phrase) != 1 { t.Fatal("unexpected phrase length") } if phrase[0] != englishDictionary[513] { t.Error("unexpected ToPhrase result") } final, err = FromPhrase(phrase, English) if err != nil { t.Error(err) } if bytes.Compare(initial, final) != 0 { t.Error("failure for value {1, 1}") } // Try for value {2, 1}. initial = []byte{2, 1} phrase, err = ToPhrase(initial, English) if err != nil { t.Error(err) } if len(phrase) != 1 { t.Fatal("unexpected phrase length") } if phrase[0] != englishDictionary[514] { t.Error("unexpected ToPhrase result") } final, err = FromPhrase(phrase, English) if err != nil { t.Error(err) } if bytes.Compare(initial, final) != 0 { t.Error("failure for value {2, 1}") } // Try for value {2, 2}. initial = []byte{2, 2} phrase, err = ToPhrase(initial, English) if err != nil { t.Error(err) } if len(phrase) != 1 { t.Fatal("unexpected phrase length") } if phrase[0] != englishDictionary[770] { t.Error("unexpected ToPhrase result") } final, err = FromPhrase(phrase, English) if err != nil { t.Error(err) } if bytes.Compare(initial, final) != 0 { t.Error("failure for value {2, 2}") } // Try for value {abbey, abbey}. initial = []byte{90, 5} phrase, err = ToPhrase(initial, English) if err != nil { t.Error(err) } if len(phrase) != 2 { t.Fatal("unexpected phrase length") } if phrase[0] != englishDictionary[0] { t.Error("unexpected ToPhrase result") } if phrase[1] != englishDictionary[0] { t.Error("unexpected ToPhrase result") } final, err = FromPhrase(phrase, English) if err != nil { t.Error(err) } if bytes.Compare(initial, final) != 0 { t.Error("failure for value {abbey, abbey}") } // Check that all values going from []byte to phrase and back result in the // original value, as deep as reasonable. for i := 0; i < 256; i++ { initial := []byte{byte(i)} phrase, err := ToPhrase(initial, English) if err != nil { t.Fatal(err) } final, err := FromPhrase(phrase, English) if err != nil { t.Fatal(err) } if bytes.Compare(initial, final) != 0 { t.Error("comparison failed during circular byte check") } } for i := 0; i < 256; i++ { for j := 0; j < 256; j++ { initial := []byte{byte(i), byte(j)} phrase, err := ToPhrase(initial, English) if err != nil { t.Fatal(err) } final, err := FromPhrase(phrase, English) if err != nil { t.Fatal(err) } if bytes.Compare(initial, final) != 0 { t.Error("comparison failed during circular byte check") } } } // It takes too long to try all numbers 3 deep, so only a handful are // picked. All edge numbers are checked. 
for i := 0; i < 256; i++ { for _, j := range []byte{0, 1, 2, 3, 16, 25, 82, 200, 252, 253, 254, 255} { for _, k := range []byte{0, 1, 2, 3, 9, 29, 62, 104, 105, 217, 252, 253, 254, 255} { initial := []byte{byte(i), j, k} phrase, err := ToPhrase(initial, English) if err != nil { t.Fatal(err) } final, err := FromPhrase(phrase, English) if err != nil { t.Fatal(err) } if bytes.Compare(initial, final) != 0 { t.Error("comparison failed during circular byte check") } } } } // Check that all values going from phrase to []byte and back result in the // original value, as deep as reasonable. for i := 0; i < DictionarySize; i++ { initial := Phrase{englishDictionary[i]} entropy, err := FromPhrase(initial, English) if err != nil { t.Fatal(err) } final, err := ToPhrase(entropy, English) if err != nil { t.Fatal(err) } if len(initial) != len(final) { t.Fatal("conversion error") } for i := range initial { if initial[i] != final[i] { t.Error("conversion error") } } } // It takes too long to try all numbers 2 deep for phrases, so the test it // not comprehensive. All edge numbers are checked. for i := 0; i < DictionarySize; i++ { for _, j := range []int{0, 1, 2, 3, 4, 5, 6, 25, 50, 75, 122, 266, 305, 1620, 1621, 1622, 1623, 1623, 1625} { initial := Phrase{englishDictionary[i], englishDictionary[j]} entropy, err := FromPhrase(initial, English) if err != nil { t.Fatal(err) } final, err := ToPhrase(entropy, English) if err != nil { t.Fatal(err) } if len(initial) != len(final) { t.Fatal("conversion error") } for i := range initial { if initial[i] != final[i] { t.Error("conversion error") } } } } // It takes too long to try all numbers 2 deep for phrases, so the test it // not comprehensive. All edge numbers are checked. for _, i := range []int{0, 1, 2, 3, 4, 5, 6, 25, 50, 75, 122, 266, 305, 1620, 1621, 1622, 1623, 1623, 1625} { for _, j := range []int{0, 1, 2, 3, 4, 5, 6, 25, 50, 75, 122, 266, 305, 1620, 1621, 1622, 1623, 1623, 1625} { for _, k := range []int{0, 1, 2, 3, 4, 5, 6, 25, 50, 75, 122, 266, 305, 1620, 1621, 1622, 1623, 1623, 1625} { initial := Phrase{englishDictionary[i], englishDictionary[j], englishDictionary[k]} entropy, err := FromPhrase(initial, English) if err != nil { t.Fatal(err) } final, err := ToPhrase(entropy, English) if err != nil { t.Fatal(err) } if len(initial) != len(final) { t.Fatal("conversion error") } for i := range initial { if initial[i] != final[i] { t.Error("conversion error") } } } } } } // TestNilInputs tries nil and 0 inputs when using the exported functions. func TestNilInputs(t *testing.T) { _, err := ToPhrase(nil, English) if err != errEmptyInput { t.Error(err) } _, err = FromPhrase(nil, English) if err != errEmptyInput { t.Error(err) } _, err = ToPhrase([]byte{0}, "") if err != errUnknownDictionary { t.Error(err) } _, err = FromPhrase(Phrase{"abbey"}, "") if err != errUnknownDictionary { t.Error(err) } ps := Phrase{}.String() if ps != "" { t.Error(ps) } ps = Phrase{""}.String() if ps != "" { t.Error(ps) } ps = Phrase{"a", ""}.String() if ps != "a " { t.Error(ps) } } // TestUnrecognizedWord tries to decode a phrase that has an unrecognized word. func TestUnrecognizedWord(t *testing.T) { phrase := Phrase{"zzzzzz"} _, err := FromPhrase(phrase, English) if err != errUnknownWord { t.Error(err) } } // TestPhraseString calls String() on a Phrase. func TestPhraseString(t *testing.T) { phrase := Phrase{"abc", "def", "g"} if phrase.String() != "abc def g" { t.Error("Phrase.String() behaving unexpectedly") } } // TestNormalization tries to decode a non-normalized string. 
func TestNormalization(t *testing.T) { a := Phrase{"abhärten"} b := Phrase{"abh\u00e4rten"} c := Phrase{"abha\u0308rten"} d := Phrase{"abh\u0061\u0308rten"} ba, err := FromPhrase(a, German) if err != nil { t.Error(err) } bb, err := FromPhrase(b, German) if err != nil { t.Error(err) } bc, err := FromPhrase(c, German) if err != nil { t.Error(err) } bd, err := FromPhrase(d, German) if err != nil { t.Error(err) } if bytes.Compare(ba, bb) != 0 { t.Error("bad decoding") } if bytes.Compare(bb, bc) != 0 { t.Error("bad decoding") } if bytes.Compare(bc, bd) != 0 { t.Error("bad decoding") } }