pax_global_header00006660000000000000000000000064132111725200014504gustar00rootroot0000000000000052 comment=ba505f2a5462fc851e6d9f8a558a427eed1187c1 jdupes-1.9/000077500000000000000000000000001321117252000126475ustar00rootroot00000000000000jdupes-1.9/.gitignore000066400000000000000000000005071321117252000146410ustar00rootroot00000000000000# # Build ignores # #.* *.o *.o.* *.a *.so *.so.* *.1.gz # # Never ignore these # !.gitignore # # Normal output and testing dirs # /jdupes /jdupes*.exe /*.pkg.tar.xz # # Backups / patches # *~ *.orig *.rej /*.patch # # debugging and editor stuff # core .gdb_history .gdbinit .*.swp *.gcda *.gcno *.gcov # Mac OS .DS_Store jdupes-1.9/CHANGES000066400000000000000000000123721321117252000136470ustar00rootroot00000000000000jdupes 1.9 - stderr on Windows is no longer polluted or empty when redirected - Added -1/--one-file-system to restrict recursion to the same filesystem - Added a universal exclusion stack which is currently only used for -X - Added -X/--exclude to use exclusion stack; supersedes -x/--xsize - More robust BTRFS enablement behavior in Makefile - Fixed Unicode display for hard linking on Windows - Efficiency improvements to internal memory allocator (string_malloc) - Documentation improvements and updates - Provide "fdupes_oneline.sh" which emulates old "fdupes -1" feature - Single file names passed as arguments are now accepted and processed jdupes 1.8 - All files are now licensed under The MIT License exclusively - Fixed a serious memory alloc bug; upgrading is *strongly* recommended - Several huge improvements to progress indicators - Fix some error message display problems and add more error checking - Fixes for several potential crashes and buffer overflows - Indicate no duplicates were found if printing matches and none exist - On Linux, jdupes now auto-tunes I/O size based on CPU L1 D-cache size - The -v switch now also shows info about bitness in the version string jdupes 1.7 - Incompatible change: zero-length 
files no longer duplicates by default - New -z/--zeromatch option to consider zero-length files as duplicates - I/O chunk size changed for better performance - The PROGRAM_NAME variable is now used properly during make - Program was re-organized into several split C files jdupes 1.6.2 - Fix: version number shown in jdupes -v wasn't updated in 1.6.1 - Prevent BTRFS dedupe of more files than the kernel can handle - Track directories to avoid scanning the same directory twice jdupes 1.6.1 - Show backslash instead of forward slash as path separator on Windows - Make BTRFS dedupe error messages more informative and less confusing - Minor code tweaks, typo and help text fixes - Split some functions into separate files (jdupes.c was getting large) jdupes 1.6 - Add the -l/--linksoft option to create symbolic links from duplicates - Disable following symlinks to directories when -s/--symlinks is used - Reduce overall memory usage by approximately 5% - Add configurable path buffer sizes and path buffer overflow checks - Fixes for some build warnings seen on ARM and MIPS jdupes 1.5.1 - Significant reduction in memory usage (with a bonus tiny speed boost) - Improvements in string_malloc memory allocator code - Bug fixes for output formatting inconsistencies - Major BTRFS dedupe compilation and functionality fixes - LOW_MEMORY compile option added for more size/speed tradeoff control jdupes 1.5 - Invert -Z option: only "soft abort" if asked explicitly to do so - Tweak internal data chunk size to reduce data cache misses - Fix partial hash optimization - Change PREFIX for building from /usr/local back to /usr jdupes 1.4 - Add support for Unicode file paths on Windows platforms - Discard floating point code of dubious value - Remove -1/--sameline feature which is not practically useful - Process partially complete duplicate scan if CTRL+C is pressed - Add -Z/--hardabort option to disable the new CTRL+C behavior - Add [n]one option to -d/--delete to discard all files in a match 
set - Minor bug fixes and tweaks to improve behavior jdupes 1.3 - Add -i/--reverse to invert the match sort order - Add -I/--isolate to force cross-parameter matching - Add "loud" debugging messages (-@ switch, build with 'make LOUD=1') jdupes 1.2.1 - Fix a serious bug that caused some duplicates to be missed jdupes 1.2 - Change I/O block size for improved performance - Improved progress indicator behavior with large files; now the progress indicator will update more frequently when full file reads are needed - Windows read speed boost with _O_SEQUENTIAL file flag - Experimental tree rebalance code tuning jdupes 1.1.1 - Fix a bug where the -r switch was always on even if not specified jdupes 1.1 - Work around the 1023-link limit for Windows hard linking so that linking can continue even when the limit is reached - Update documentation to include hard link arrow explanations - Add "time of check to time of use" checks immediately prior to taking actions on files so that files which changed since being checked will not be touched, avoiding potential data loss on "live" data sets - Add debug stats for files skipped due to Windows hard link limit - Change default sort to filename instead of modification time - Replaced Windows "get inode number" code with simpler, faster version - Fixed a bug where an extra newline was at the end of printed matches - Reduced progress delay interval; it was a bit slow on many large files jdupes 1.0.2 - Update jody_hash code to latest version - Change string_malloc to enable future string_free() improvements - Add string_malloc counters for debug stat mode - Add '+size' option to -x/--xsize switch to exclude files larger than the specified size instead of smaller than that size jdupes 1.0.1 - Fix bug in deletion set counter that would show e.g. 
"Set 1 of 0" - Minor size reductions by merging repeated fixed strings - Add feature flag 'fastmath' to show when compiled with -ffast-math - Corrections to code driven by -Wconversion and -Wwrite-strings jdupes 1.0 First release. For changes before the 'jdupes' name change, see OLD_CHANGES jdupes-1.9/INSTALL000066400000000000000000000050151321117252000137010ustar00rootroot00000000000000Installing jdupes -------------------------------------------------------------------- To install the program, issue the following commands: make su root make install This will install the program in /usr/bin. You may change this to a different location by editing the Makefile. Please refer to the Makefile for an explanation of compile-time options. If you're having trouble compiling, please take a look at the Makefile. Various build options are available and can be turned on at compile time by setting CFLAGS_EXTRA or by passing it to 'make': make CFLAGS_EXTRA=-DYOUR_OPTION make CFLAGS_EXTRA='-DYOUR_OPTION_ONE -DYOUR_OPTION_TWO' This is a list of options that can be "turned on" this way: OMIT_GETOPT_LONG Do not use getopt_long() C library call ON_WINDOWS Modify code to compile with MinGW on Windows USE_TREE_REBALANCE * Use experimental tree rebalancing code CONSIDER_IMBALANCE * Change tree rebalance to analyze weights first * These options may slow down the program somewhat and are off by default. Do not enable them unless you are experimenting. Certain options need to be turned on by setting a variable passed to make instead of using CFLAGS_EXTRA, i.e. 'make DEBUG=1': DEBUG Turn on algorithm statistic reporting with '-D' LOUD '-@' for low-level debugging; enables DEBUG ENABLE_BTRFS Enable '-B/--dedupe' for btrfs deduplication LOW_MEMORY Build for lower memory usage instead of speed The LOW_MEMORY option tweaks various knobs in the program to lower total memory usage. It also disables some features to reduce the size of certain data structures. 
The improvements in memory usage are not very large, but if you're running in a very RAM-limited environment or have a CPU with very small caches it may be the best choice. A test directory is included so that you may familiarize yourself with the way jdupes operates. You may test the program before installing it by issuing a command such as "./jdupes testdir" or "./jdupes -r testdir", just to name a couple of examples. Refer to the documentation for information on valid options. A comparison shell script is also included. It will run your natively installed 'jdupes' or 'jdupes' with the directories and extra options you specify and compare the run times and output a 'diff' of the two program outputs. Unless the core algorithm or sort behavior is changed, both programs should produce identical outputs and the 'diff' output shouldn't appear at all. To use it, type: ./compare_jdupes.sh [options] jdupes-1.9/LICENSE000066400000000000000000000021211321117252000136500ustar00rootroot00000000000000The MIT License (MIT) Copyright (C) 2015-2017 Jody Lee Bruchon and contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. jdupes-1.9/Makefile000066400000000000000000000110061321117252000143050ustar00rootroot00000000000000# jdupes Makefile ##################################################################### # Standand User Configuration Section # ##################################################################### # PREFIX determines where files will be installed. Common examples # include "/usr" or "/usr/local". PREFIX = /usr # Certain platforms do not support long options (command line options). # To disable long options, uncomment the following line. #CFLAGS += -DOMIT_GETOPT_LONG # Uncomment for Linux with BTRFS support. Needed for -B/--dedupe. # This can also be enabled at build time: 'make ENABLE_BTRFS=1' #CFLAGS += -DENABLE_BTRFS # Uncomment for low memory usage at the expense of speed and features # This can be enabled at build time: 'make LOW_MEMORY=1' #LOW_MEMORY=1 # Uncomment this to build in hardened mode. # This can be enabled at build time: 'make HARDEN=1' #HARDEN=1 ##################################################################### # Developer Configuration Section # ##################################################################### # PROGRAM_NAME determines the installation name and manual page name PROGRAM_NAME = jdupes # BIN_DIR indicates directory where program is to be installed. # Suggested value is "$(PREFIX)/bin" BIN_DIR = $(PREFIX)/bin # MAN_DIR indicates directory where the jdupes man page is to be # installed. 
Suggested value is "$(PREFIX)/man/man1" MAN_BASE_DIR = $(PREFIX)/share/man MAN_DIR = $(MAN_BASE_DIR)/man1 MAN_EXT = 1 # Required External Tools INSTALL = install # install : UCB/GNU Install compatiable #INSTALL = ginstall RM = rm -f MKDIR = mkdir -p #MKDIR = mkdirhier #MKDIR = mkinstalldirs # Make Configuration CC ?= gcc COMPILER_OPTIONS = -Wall -Wextra -Wwrite-strings -Wcast-align -Wstrict-aliasing -Wstrict-overflow -Wstrict-prototypes -Wpointer-arith -Wundef COMPILER_OPTIONS += -Wshadow -Wfloat-equal -Wstrict-overflow=5 -Waggregate-return -Wcast-qual -Wswitch-default -Wswitch-enum -Wconversion -Wunreachable-code -Wformat=2 -Winit-self COMPILER_OPTIONS += -std=gnu99 -O2 -g -D_FILE_OFFSET_BITS=64 -fstrict-aliasing -pipe ##################################################################### # no need to modify anything beyond this point # ##################################################################### # Debugging code inclusion ifdef LOUD DEBUG=1 COMPILER_OPTIONS += -DLOUD_DEBUG endif ifdef DEBUG COMPILER_OPTIONS += -DDEBUG endif ifdef HARDEN COMPILER_OPTIONS += -Wformat -Wformat-security -D_FORTIFY_SOURCE=2 -fstack-protector-strong -fPIE -fpie -Wl,-z,relro -Wl,-z,now endif # Catch someone trying to enable BTRFS in flags and turn on ENABLE_BTRFS ifneq (,$(findstring DENABLE_BTRFS,$(CFLAGS))) ENABLE_BTRFS=1 endif ifneq (,$(findstring DENABLE_BTRFS,$(CFLAGS_EXTRA))) ENABLE_BTRFS=1 endif # MinGW needs this for printf() conversions to work ifeq ($(OS), Windows_NT) ifndef NO_UNICODE UNICODE=1 COMPILER_OPTIONS += -municode endif COMPILER_OPTIONS += -D__USE_MINGW_ANSI_STDIO=1 OBJS += win_stat.o override undefine ENABLE_BTRFS override undefine HAVE_BTRFS_IOCTL_H endif # Remap old BTRFS support option to new name ifdef HAVE_BTRFS_IOCTL_H ENABLE_BTRFS=1 endif # New BTRFS support option ifdef ENABLE_BTRFS COMPILER_OPTIONS += -DENABLE_BTRFS OBJS += act_dedupefiles.o else OBJS_CLEAN += act_dedupefiles.o endif # Low memory mode ifdef LOW_MEMORY COMPILER_OPTIONS += 
-DLOW_MEMORY -DJODY_HASH_WIDTH=32 -DSMA_PAGE_SIZE=32768 endif CFLAGS += $(COMPILER_OPTIONS) $(CFLAGS_EXTRA) INSTALL_PROGRAM = $(INSTALL) -m 0755 INSTALL_DATA = $(INSTALL) -m 0644 # ADDITIONAL_OBJECTS - some platforms will need additional object files # to support features not supplied by their vendor. Eg: GNU getopt() #ADDITIONAL_OBJECTS += getopt.o OBJS += jdupes.o jody_hash.o jody_paths.o jody_sort.o jody_win_unicode.o string_malloc.o OBJS += jody_cacheinfo.o OBJS += act_deletefiles.o act_linkfiles.o act_printmatches.o act_summarize.o OBJS += $(ADDITIONAL_OBJECTS) all: jdupes jdupes: $(OBJS) $(CC) $(CFLAGS) $(LDFLAGS) -o $(PROGRAM_NAME) $(OBJS) installdirs: test -e $(DESTDIR)$(BIN_DIR) || $(MKDIR) $(DESTDIR)$(BIN_DIR) test -e $(DESTDIR)$(MAN_DIR) || $(MKDIR) $(DESTDIR)$(MAN_DIR) install: jdupes installdirs $(INSTALL_PROGRAM) $(PROGRAM_NAME) $(DESTDIR)$(BIN_DIR)/$(PROGRAM_NAME) $(INSTALL_DATA) $(PROGRAM_NAME).1 $(DESTDIR)$(MAN_DIR)/$(PROGRAM_NAME).$(MAN_EXT) clean: $(RM) $(OBJS) $(OBJS_CLEAN) $(PROGRAM_NAME) $(PROGRAM_NAME).exe *~ *.gcno *.gcda *.gcov distclean: clean $(RM) *.pkg.tar.xz package: +./chroot_build.sh jdupes-1.9/OLD_CHANGES000066400000000000000000000232561321117252000143500ustar00rootroot00000000000000=== NOTE: This is archived material from 'fdupes' development === === and from the pre-'jdupes' code work. DO NOT EDIT. === The following list, organized by fdupes version, documents changes to fdupes. Every item on the list includes, inside square brackets, a list of indentifiers referring to the people who contributed that particular item. When more than one person is listed the person who contributed the patch or idea appears first, followed by those who've otherwise worked on that item. For a list of contributors names and identifiers please see the CONTRIBUTORS file. Changes from 2.1 to 2.2 [JLB] - Changed fdupes-jody executable names and document texts to use the full 'fdupes-jody' name instead of 'fdupes'. 
Moved copyrights and contact information to reflect 'fdupes-jody' code responsibility. This is primarily intended to keep fdupes-jody distinctly separate from the fdupes by Adrian Lopez upon which it is based, and to make certain that the correct person gets harassed if it breaks ;-) - Added '-B/--dedupe' feature (not compiled in by default) which sends file match lists directly to the kernel btrfs driver to do block-level data de-duplication. Patch submitted by Sebastian Schmidt . Thanks! - Remove and replace some string function calls. Performance increase shows on benchmarks but not significant in most cases. Changes from 2.0.2 to 2.1 [JLB] - Minor performance improvements to hashing and memory allocation code - Added an experimental tree rebalancing function. It is compiled out by default because benchmarks indicated either no improvement or a slight slowdown compared to an unbalanced tree. To compile it in, try 'make CFLAGS_EXTRA=-DUSE_TREE_REBALANCE' - Increased size of string_malloc pages from 64K to 256K since testing shows a minor performance improvement with large file sets - Made variable scope and type changes for a tiny performance boost Changes from 2.0.1 to 2.0.2 [JLB] - Removed redundant getfilestats() calls for a tiny speed boost - Added a -D/--debug switch to show a set of statistic counters for various parts of the fdupes algorithms. Can be used to determine what fdupes is doing "under the hood" and give insight into why performance may be slower or behave strangely on your data set. To enable it, use DEBUG=1 with your make command, i.e. 'make DEBUG=1' - Performance note: the fdupes algorithm uses a tree data structure and becomes progressively slower as more files are processed and the tree depth grows larger. As of this version, a rewrite of the core algorithm is in progress which will remove this tree structure and significantly improve performance on most large file sets. 
Changes from 2.0 to 2.0.1 [JLB] - Hard links were treated as identical in match checking but not in match confirmation. This version fixes the problem, increasing speed with file sets involving lots of hard links. - A few minor efficiency improvements were performed Changes from 1.51-jody5 to 2.0 [JLB] - Bumped major version to 2.0 due to the number of behavioral changes and improvements to the code, plus it looks less messy than the hyphenated versioning - Increased "chunk size" for better performance and to solve the disk thrashing problem when checking two large files for a match - When using the -H option, hard links now automatically match each other without performing any file reads - Changed primary memory allocator to string_alloc by Jody Bruchon to improve performance over generic malloc/calloc - Progress indicator now lists the number of duplicate pairs found in addition to the usual file progress count and completion percentage - Progress is updated more rapidly when full file comparisons happen so users are less likely to think fdupes is "frozen" - Floating point code was made optional and is removed by default - A comparison script was added to check built fdupes behavior against whatever fdupes is currently installed on the system - Added "undocumented" -Q / --quick option which is not fully safe but can be used to drastically reduce run time for large data sets if some risk of data loss is acceptable to the user - Added -O/--paramorder option to sort files by the order their parent directory set was specified on the command line first. This makes it possible to choose what directories' files get preserved over others while using -d and -N together - The file list loading progress indicator was revamped. 
Rather than a simple "spinning pipe" indicator, it now shows the number of files and directories scanned as well as which command line specified set the scanning is currently happening in - fdupes was enhanced to support more than 2 million files total by changing from 'int' internal sizes to the maximum supported by the platform being compiled for - Hard link code was modified to be much safer; now any file is only permanently deleted after a hard link succeeds - Hard links on Windows (on supporting filesystems) are now supported - Hashing code was optimized for a benchmarked 8.4% improvement in file processing overhead (and much more under typical real-world conditions) - Hard linking checks for more error conditions and output is much clearer about what action was taken on any given file Changes from 1.51-jody4-jkl1 to 1.51-jody5 [JLB] - Less malloc()s so less memory usage and a slight speedup - Change --order=name to an intelligent numerically correct sort - Fixed bug where progress text was missing until first update - Performance boost for small files (4KB or less) by not running redundant hashes and comparisons - Test files added for numerically correct sort ordering Changes from 1.51-jody4 to 1.51-jody4-jkl1 [JKL] - added `--xsize=SIZE' option: exclude files of size < SIZE - updated Makefile: `PREFIX = /usr/local' - updated README: Usage to reflect curent parameters Changes from 1.51-jody2 to 1.51-jody4 [JLB] - Add support for hard linking duplicates with -L switch - Updated jody_hash algorithm with much lower collision rate - Remove freopen() call that posed a portability problem - Improved progress indicator behavior - Many minor bug fixes Changes from 1.51 to 1.51-jody2 [JLB] - Switched to C99 - Replaced MD5 with Jody Bruchon's hash function - Added a delay to progress indications for better performance - Removed lots of unused code - Ported fdupes to Microsoft Windows (with MinGW) Changes from 1.50 to 1.51 - Added support for 64-bit file offsets on 
32-bit systems. - Using tty for interactive input instead of regular stdin. This is to allow feeding filenames via stdin in future versions of fdupes without breaking interactive deletion feature. - Fixed some typos in --help. - Turned C++ style comments into C style comments. Changes from 1.40 to 1.50-PR2 - Fixed memory leak. [JB] - Added "--summarize" option. [AL] - Added "--recurse:" selective recursion option. [AL] - Added "--noprompt" option for totally automated deletion of duplicate files. - Now sorts duplicates (old to new) for consistent order when listing or deleteing duplicate files. - Now tests for early matching of files, which should help speed up the matching process when large files are involved. - Added warning whenever a file cannot be deleted. [CHL, AL] - Fixed bug where some files would not be closed after failure. [AL] - Fixed bug where confirmmatch() function wouldn't always deal properly with zero-length files. [AL] - Fixed bug where progress indicator would not be cleared when no files were found. [AL] - Removed experimental red-black tree code (it was slower on my system than the default code). [AL] - Modified md5/md5.c to avoid compiler warning. [CHL] - Changes to fdupes.c for compilation under platforms where getopt_long is unavailable. [LR, AL] - Changes to help text for clarity. [AL] - Various changes and improvements to Makefile. [PB, AL] Changes from 1.31 to 1.40 - Added option to omit the first file in each group of matches. [LM, AL] - Added escaping of filenames containing spaces when sameline option is specified. [AL] - Changed version indicator format from "fdupes version X.Y" to the simpler "fdupes X.Y". [AL] - Changed ordering of options appearing in the help text (--help), manpage, and README file. [AL] Changes from 1.30 to 1.31 - Added interactive option to preserve all files during delete procedure (something similar was already in place, but now it's official). [AL] - Updated delete procedure prompt format. 
[AL] - Cosmetic code changes. [AL] Changes from 1.20 to 1.30 - Added size option to display size of duplicates. [LB, AL] - Added missing typecast for proper compilation under g++. [LB] - Better handling of errors occurring during retrieval of a file's signature. [KK, AL] - No longer displays an error message when specified directories contain no files. [AL] - Added red-black tree structure (experimental compile-time option, disabled by default). [AL] Changes from 1.12 to 1.20 - Fixed bug where program would crash when files being scanned were named pipes or sockets. [FD] - Fix against security risk resulting from the use of a temporary file to store md5sum output. [FD, AL] - Using an external md5sum program is now optional. Started using L. Peter Deutsh's MD5 library instead. [FD, AL] - Added hardlinks option to distinguish between hard links and actual duplicate files. [FD, AL] - Added noempty option to exclude zero-length files from consideration [AL] Changes from 1.11 to 1.12 - Improved handling of extremely long input on preserve prompt (delete option). [SSD, AL] Changes from 1.1 to 1.11 - Started checking file sizes before signatures for better performance. [AB, AL] - Added fdupes manpage. [AB, AL] Changes from 1.0 to 1.1 - Added delete option for semi-automatic deletion of duplicate files. [AL] jdupes-1.9/OLD_CONTRIBUTORS000066400000000000000000000016721321117252000152330ustar00rootroot00000000000000=== NOTE: This is archived material from 'fdupes' development === === and from the pre-'jdupes' code work. DO NOT EDIT. === The following people have contributed in some way to the development of fdupes. Please see the CHANGES file for detailed information on their contributions. Names are listed in alphabetical order. [AB] Adrian Bridgett (adrian.bridgett@iname.com) [AL] Adrian Lopez (adrian2@caribe.net) [CHL] Charles Longeau (chl@tuxfamily.org) [FD] Frank DENIS, a.k.a. Jedi/Sector One, a.k.a. 
DJ Chrysalis (j@4u.net) [JB] Jean-Baptiste () [JLB] Jody Lee Bruchon (jody@jodybruchon.com) [JKL] Jan Klabacka (jan.klabacka@gmail.com) [KK] Kresimir Kukulj (madmax@pc-hrvoje.srce.hr) [LB] Laurent Bonnaud (Laurent.Bonnaud@iut2.upmf-grenoble.fr) [LM] Luca Montecchiani (m.luca@iname.com) [LR] Lukas Ruf (lukas@lpr.ch) [PB] Peter Bray (Sydney, Australia) [SSD] Steven S. Dick (ssd@nevets.oau.org) jdupes-1.9/README000066400000000000000000000412231321117252000135310ustar00rootroot00000000000000Introduction -------------------------------------------------------------------------- jdupes is a program for identifying and taking actions upon duplicate files. This fork known as 'jdupes' is heavily modified from and improved over the original. See CHANGES for details. A WORD OF WARNING: jdupes IS NOT a drop-in compatible replacement for fdupes! Do not blindly replace fdupes with jdupes in scripts and expect everything to work the same way. Option availability and meanings differ between the two programs. For example, the -I switch in jdupes means "isolate" and blocks intra-argument matching, while in fdupes it means "immediately delete files during scanning without prompting the user." Why use jdupes instead of the original fdupes or other forks? -------------------------------------------------------------------------- The biggest reason is raw speed. In testing on various data sets, jdupes is over 7 times faster than fdupes-1.51 on average. jdupes is the only Windows port of fdupes. Most duplicate scanners built on Linux and other UNIX-like systems do not compile for Windows out-of-the-box and even if they do, they don't support Unicode and other Windows-specific quirks and features. jdupes is generally stable. All releases of jdupes are compared against a known working reference versions of fdupes or jdupes to be certain that output does not change. You get the benefits of an aggressive development process without putting your data at increased risk. 
Code in jdupes is written with data loss avoidance as the highest priority. If a choice must be made between being aggressive or careful, the careful way is always chosen. jdupes includes features that are not always found elsewhere. Examples of such features include btrfs block-level deduplication and control over which file is kept when a match set is automatically deleted. jdupes is not afraid of dropping features of low value; a prime example is the -1 switch which outputs all matches in a set on one line, a feature which was found to be useless in real-world tests and therefore thrown out. The downside is that jdupes development is never guaranteed to be bug-free! If the program eats your dog or sets fire to your lawn, the authors cannot be held responsible. If you notice a bug, please report it. While jdupes maintains some degree of compatibility with fdupes from which it was originally derived, there is no guarantee that it will continue to maintain such compatibility in the future. However, compatibility will be retained between minor versions, i.e. jdupes-1.6 and jdupes-1.6.1 should not have any significant differences in results with identical command lines. What jdupes is not: a similar (but not identical) file finding tool -------------------------------------------------------------------------- Please note that jdupes ONLY works on 100% exact matches. It does not have any sort of "similarity" matching, nor does it know anything about any specific file formats such as images or sounds. Something as simple as a change in embedded metadata such as the ID3 tags in an MP3 file or the EXIF information in a JPEG image will not change the sound or image presented to the user when opened, but technically it makes the file no longer identical to the original. Plenty of excellent tools already exist to "fuzzy match" specific file types using knowledge of their file formats to help. There are no plans to add this type of matching to jdupes. 
Usage -------------------------------------------------------------------------- Usage: jdupes [options] DIRECTORY... -@ --loud output annoying low-level debug info while running -1 --one-file-system do not match files on different filesystems/devices -A --nohidden exclude hidden files from consideration -B --dedupe Send matches to btrfs for block-level deduplication -d --delete prompt user for files to preserve and delete all others; important: under particular circumstances, data may be lost when using this option together with -s or --symlinks, or when specifying a particular directory more than once; refer to the documentation for additional information -D --debug output debug statistics after completion -f --omitfirst omit the first file in each set of matches -h --help display this help message -H --hardlinks treat any linked files as duplicate files. Normally linked files are treated as non-duplicates for safety -i --reverse reverse (invert) the match sort order -I --isolate files in the same specified directory won't match -l --linksoft make relative symlinks for duplicates w/o prompting -L --linkhard hard link all duplicate files without prompting -m --summarize summarize dupe information -N --noprompt together with --delete, preserve the first file in each set of duplicates and delete the rest without prompting the user -o --order=BY select sort order for output, linking and deleting; by -O --paramorder Parameter order is more important than selected -O sort mtime (BY=time) or filename (BY=name, the default) -p --permissions don't consider files with different owner/group or permission bits as duplicates -Q --quick skip byte-for-byte confirmation for quick matching WARNING: -Q can result in data loss! Be very careful! 
-r --recurse for every directory, process its subdirectories too -R --recurse: for each directory given after this option follow subdirectories encountered within (note the ':' at the end of the option, manpage for more details) -s --symlinks follow symlinks -S --size show size of duplicate files -q --quiet hide progress indicator -v --version display jdupes version and license information -x --xsize=SIZE exclude files of size < SIZE bytes from consideration --xsize=+SIZE '+' specified before SIZE, exclude size > SIZE -X --exclude=spec:info exclude files based on specified criteria specs: dir size+-= Exclusions are cumulative: -X dir:abc -X dir:efg -z --zeromatch consider zero-length files to be duplicates -Z --softabort If the user aborts (i.e. CTRL-C) act on matches so far For sizes, K/M/G/T/P/E[B|iB] suffixes can be used (case-insensitive) The -n/--noempty option was removed for safety. Matching zero-length files as duplicates now requires explicit use of the -z/--zeromatch option instead. Duplicate files are listed together in groups with each file displayed on a Separate line. The groups are then separated from each other by blank lines. The -s/--symlinks option will treat symlinked files as regular files, but direct symlinks will be treated as if they are hard linked files and the -H/--hardlinks option will apply to them in the same manner. When using -d or --delete, care should be taken to insure against accidental data loss. While no information will be immediately lost, using this option together with -s or --symlink can lead to confusing information being presented to the user when prompted for files to preserve. Specifically, a user could accidentally preserve a symlink while deleting the file it points to. A similar problem arises when specifying a particular directory more than once. All files within that directory will be listed as their own duplicates, leading to data loss should a user preserve a file without its "duplicate" (the file itself!) 
The -I/--isolate option attempts to block matches that are contained in the same specified directory parameter on the command line. Due to the underlying nature of the jdupes algorithm, a lot of matches will be blocked by this option that probably should not be. This code could use improvement. Hard and soft (symbolic) linking status symbols and behavior -------------------------------------------------------------------------- A set of arrows are used in file linking to show what action was taken on each link candidate. These arrows are as follows: ----> File was hard linked to the first file in the duplicate chain -@@-> File was symlinked to the first file in the chain -==-> Already a hard link to the first file in the chain -//-> File linking failed due to an error during the linking process If your data set has linked files and you do not use -H to always consider them as duplicates, you may still see linked files appear together in match sets. This is caused by a separate file that matches with linked files independently and is the correct behavior. See notes below on the "triangle problem" in jdupes for technical details. Microsoft Windows platform-specific notes -------------------------------------------------------------------------- The Windows port does not support Unicode, only ANSI file names. This is because Unicode support on Windows is difficult to add to existing code without making it very messy or breaking things. Support is eventually planned for Unicode on Windows. Windows has a hard limit of 1024 hard links per file. There is no way to change this. The documentation for CreateHardLink() states: "The maximum number of hard links that can be created with this function is 1023 per file. If more than 1023 links are created for a file, an error results." (The number is actually 1024, but they're ignoring the first file.) 
The current jdupes algorithm's "triangle problem" -------------------------------------------------------------------------- Pairs of files are excluded individually based on how the two files compare. For example, if --hardlinks is not specified then two files which are hard linked will not match one another for duplicate scanning purposes. The problem with only examining files in pairs is that certain circumstances will lead to the exclusion being overridden. Let's say we have three files with identical contents: a/file1 a/file2 a/file3 and 'a/file1' is linked to 'a/file3'. Here's how 'jdupes a/' sees them: --- Are 'a/file1' and 'a/file2' matches? Yes [point a/file1->duplicates to a/file2] Are 'a/file1' and 'a/file3' matches? No (hard linked already, -H off) Are 'a/file2' and 'a/file3' matches? Yes [point a/file2->duplicates to a/file3] --- Now you have the following duplicate list: a/file1->duplicates ==> a/file2->duplicates ==> a/file3 The solution is to split match sets into multiple sets, but doing this will also remove the guarantee that files will only ever appear in one match set and could result in data loss if handled improperly. In the future, options for "greedy" and "sparse" may be introduced to switch between allowing triangle matches to be in the same set vs. splitting sets after matching finishes without the "only ever appears once" guarantee. Does jdupes meet the "Good Practice when Deleting Duplicates" by rmlint? -------------------------------------------------------------------------- Yes. If you've not read this list of cautions, it is available at http://rmlint.readthedocs.io/en/latest/cautions.html Here's a breakdown of how jdupes addresses each of the items listed. "Backup your data" "Measure twice, cut once" These guidelines are for the user of duplicate scanning software, not the software itself. Back up your files regularly. 
Use jdupes to print a list of what is found as duplicated and check that list very carefully before automatically deleting the files. "Beware of unusual filename characters" The only character that poses a concern in jdupes is a newline '\n' and that is only a problem because the duplicate set printer uses them to separate file names. Actions taken by jdupes are not parsed like a command line, so spaces and other weird characters in names aren't a problem. Escaping the names properly if acting on the printed output is a problem for the user's shell script or other external program. "Consider safe removal options" This is also an exercise for the user. "Traversal Robustness" jdupes tracks each directory traversed by dev:inode pair to avoid adding the contents of the same directory twice. This prevents the user from being able to register all of their files twice by duplicating an entry on the command line. Symlinked directories are only followed if they weren't already followed earlier. Files are renamed to a temporary name before any linking is done and if the link operation fails they are renamed back to the original name. "Collision Robustness" jdupes uses jodyhash for file data hashing. This hash is extremely fast with a low collision rate, but it still encounters collisions as any hash function will ("secure" or otherwise) due to the pigeonhole principle. This is why jdupes performs a full-file verification before declaring a match. It's slower than matching by hash only, but the pigeonhole principle puts all data sets larger than the hash at risk of collision, meaning a false duplicate detection and data loss. The slower completion time is not as important as data integrity. Checking for a match based on hashes alone is irresponsible, and using secure hashes like MD5 or the SHA families is orders of magnitude slower than jodyhash while still suffering from the risk brought about by the pigeonholing. 
An example of this problem is as follows: if you have 365 days in a year and 366 people, the chance of having at least two birthdays on the same day is guaranteed; likewise, even though SHA512 is a 512-bit (64-byte) wide hash, there are guaranteed to be at least 256 pairs of data streams that causes a collision once any of the data streams being hashed for comparison is 65 bytes (520 bits) or larger. "Unusual Characters Robustness" jdupes does not protect the user from putting ASCII control characters in their file names; they will mangle the output if printed, but they can still be operated upon by the actions (delete, link, etc.) in jdupes. "Seek Thrash Robustness" jdupes uses an I/O chunk size that is optimized for reading as much as possible from disk at once to take advantage of high sequential read speeds in traditional rotating media drives while balancing against the significantly higher rate of CPU cache misses triggered by an excessively large I/O buffer size. Enlarging the I/O buffer further may allow for lots of large files to be read with less head seeking, but the CPU cache misses slow the algorithm down and memory usage increases to hold these large buffers. jdupes is benchmarked periodically to make sure that the chosen I/O chunk size is the best compromise for a wide variety of data sets. "Memory Usage Robustness" This is a very subjective concern considering that even a cell phone in someone's pocket has at least 1GB of RAM, however it still applies in the embedded device world where 32MB of RAM might be all that you can have. Even when processing a data set with over a million files, jdupes memory usage (tested on Linux x86_64 with -O3 optimization) doesn't exceed 2GB. A low memory mode can be chosen at compile time to reduce overall memory usage with a small performance penalty. 
Contact Information -------------------------------------------------------------------------- For all jdupes inquiries, contact Jody Bruchon Please DO NOT contact Adrian Lopez about issues with jdupes. Legal Information and Software License -------------------------------------------------------------------------- jdupes is Copyright (C) 2015-2017 by Jody Bruchon Derived from the original 'fdupes' (C) 1999-2017 by Adrian Lopez Includes other code libraries which are (C) 2015-2017 by Jody Bruchon The MIT License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. jdupes-1.9/TODO000066400000000000000000000010621321117252000133360ustar00rootroot00000000000000- Option -R should not have to be separated from the rest, such that "jdupes -dR testdir", "jdupes -d -R testdir", "jdupes -Rd testdir", etc., all yield the same results. - Add option to highlight or identify symlinked files (suggest using --classify to identify symlinks with @ suffix... when specified, files containing @ are listed using \@). 
- Consider option to match only to files in specific directory. - Add a way to store stat() info + inital and full hashes for explicit loading in future runs to speed up repeated calls to the program. jdupes-1.9/act_dedupefiles.c000066400000000000000000000144011321117252000161330ustar00rootroot00000000000000/* BTRFS deduplication of file blocks * This file is part of jdupes; see jdupes.c for license information */ #include "jdupes.h" #ifdef ENABLE_BTRFS #include #include #include #include #include #include #include #include #include #include "act_dedupefiles.h" /* Message to append to BTRFS warnings based on write permissions */ static const char *readonly_msg[] = { "", " (no write permission)" }; static char *dedupeerrstr(int err) { static char buf[256]; buf[sizeof(buf)-1] = '\0'; if (err == BTRFS_SAME_DATA_DIFFERS) { snprintf(buf, sizeof(buf), "BTRFS_SAME_DATA_DIFFERS (data modified in the meantime?)"); return buf; } else if (err < 0) { return strerror(-err); } else { snprintf(buf, sizeof(buf), "Unknown error %d", err); return buf; } } extern void dedupefiles(file_t * restrict files) { struct btrfs_ioctl_same_args *same; char **dupe_filenames; /* maps to same->info indices */ file_t *curfile; unsigned int n_dupes, max_dupes, cur_info; unsigned int cur_file = 0, max_files, total_files = 0; int fd; int ret, status, readonly; LOUD(fprintf(stderr, "\nRunning dedupefiles()\n");) /* Find the largest dupe set, alloc space to hold structs for it */ get_max_dupes(files, &max_dupes, &max_files); /* Kernel dupe count is a uint16_t so exit if the type's limit is exceeded */ if (max_dupes > 65535) { fprintf(stderr, "Largest duplicate set (%d) exceeds the 65535-file dedupe limit.\n", max_dupes); fprintf(stderr, "Ask the program author to add this feature if you really need it. 
Exiting!\n"); exit(EXIT_FAILURE); } same = calloc(sizeof(struct btrfs_ioctl_same_args) + sizeof(struct btrfs_ioctl_same_extent_info) * max_dupes, 1); dupe_filenames = malloc(max_dupes * sizeof(char *)); LOUD(fprintf(stderr, "dedupefiles structs: alloc1 size %lu => %p, alloc2 size %lu => %p\n", sizeof(struct btrfs_ioctl_same_args) + sizeof(struct btrfs_ioctl_same_extent_info) * max_dupes, (void *)same, max_dupes * sizeof(char *), (void *)dupe_filenames);) if (!same || !dupe_filenames) oom("dedupefiles() structures"); /* Main dedupe loop */ while (files) { if (ISFLAG(files->flags, F_HAS_DUPES) && files->size) { cur_file++; if (!ISFLAG(flags, F_HIDEPROGRESS)) { fprintf(stderr, "Dedupe [%u/%u] %u%% \r", cur_file, max_files, cur_file * 100 / max_files); } /* Open each file to be deduplicated */ cur_info = 0; for (curfile = files->duplicates; curfile; curfile = curfile->duplicates) { int errno2; /* Never allow hard links to be passed to dedupe */ if (curfile->device == files->device && curfile->inode == files->inode) { LOUD(fprintf(stderr, "skipping hard linked file pair: '%s' = '%s'\n", curfile->d_name, files->d_name);) continue; } dupe_filenames[cur_info] = curfile->d_name; readonly = 0; if (access(curfile->d_name, W_OK) != 0) readonly = 1; fd = open(curfile->d_name, O_RDWR); LOUD(fprintf(stderr, "opening loop: open('%s', O_RDWR) [%d]\n", curfile->d_name, fd);) /* If read-write open fails, privileged users can dedupe in read-only mode */ if (fd == -1) { /* Preserve errno in case read-only fallback fails */ LOUD(fprintf(stderr, "opening loop: open('%s', O_RDWR) failed: %s\n", curfile->d_name, strerror(errno));) errno2 = errno; fd = open(curfile->d_name, O_RDONLY); if (fd == -1) { LOUD(fprintf(stderr, "opening loop: fallback open('%s', O_RDONLY) failed: %s\n", curfile->d_name, strerror(errno));) fprintf(stderr, "Unable to open '%s': %s%s\n", curfile->d_name, strerror(errno2), readonly_msg[readonly]); continue; } LOUD(fprintf(stderr, "opening loop: fallback open('%s', 
O_RDONLY) succeeded\n", curfile->d_name);) } same->info[cur_info].fd = fd; same->info[cur_info].logical_offset = 0; cur_info++; total_files++; } n_dupes = cur_info; same->logical_offset = 0; same->length = (unsigned long)files->size; same->dest_count = (uint16_t)n_dupes; /* kernel type is __u16 */ fd = open(files->d_name, O_RDONLY); LOUD(fprintf(stderr, "source: open('%s', O_RDONLY) [%d]\n", files->d_name, fd);) if (fd == -1) { fprintf(stderr, "unable to open(\"%s\", O_RDONLY): %s\n", files->d_name, strerror(errno)); goto cleanup; } /* Call dedupe ioctl to pass the files to the kernel */ ret = ioctl(fd, BTRFS_IOC_FILE_EXTENT_SAME, same); LOUD(fprintf(stderr, "dedupe: ioctl('%s' [%d], BTRFS_IOC_FILE_EXTENT_SAME, same) => %d\n", files->d_name, fd, ret);) if (close(fd) == -1) fprintf(stderr, "Unable to close(\"%s\"): %s\n", files->d_name, strerror(errno)); if (ret < 0) { fprintf(stderr, "dedupe failed against file '%s' (%d matches): %s\n", files->d_name, n_dupes, strerror(errno)); goto cleanup; } for (cur_info = 0; cur_info < n_dupes; cur_info++) { status = same->info[cur_info].status; if (status != 0) { if (same->info[cur_info].bytes_deduped == 0) { fprintf(stderr, "warning: dedupe failed: %s => %s: %s [%d]%s\n", files->d_name, dupe_filenames[cur_info], dedupeerrstr(status), status, readonly_msg[readonly]); } else { fprintf(stderr, "warning: dedupe only did %" PRIdMAX " bytes: %s => %s: %s [%d]%s\n", (intmax_t)same->info[cur_info].bytes_deduped, files->d_name, dupe_filenames[cur_info], dedupeerrstr(status), status, readonly_msg[readonly]); } } } cleanup: for (cur_info = 0; cur_info < n_dupes; cur_info++) { if (close((int)same->info[cur_info].fd) == -1) { fprintf(stderr, "unable to close(\"%s\"): %s", dupe_filenames[cur_info], strerror(errno)); } } } /* has dupes */ files = files->next; } if (!ISFLAG(flags, F_HIDEPROGRESS)) fprintf(stderr, "Deduplication done (%d files processed)\n", total_files); free(same); free(dupe_filenames); return; } #endif /* ENABLE_BTRFS */ 
jdupes-1.9/act_dedupefiles.h000066400000000000000000000005361321117252000161440ustar00rootroot00000000000000/* jdupes action for BTRFS block-level deduplication * This file is part of jdupes; see jdupes.c for license information */ #ifndef ACT_DEDUPEFILES_H #define ACT_DEDUPEFILES_H #ifdef __cplusplus extern "C" { #endif #include "jdupes.h" extern void dedupefiles(file_t * restrict files); #ifdef __cplusplus } #endif #endif /* ACT_DEDUPEFILES_H */ jdupes-1.9/act_deletefiles.c000066400000000000000000000103721321117252000161320ustar00rootroot00000000000000/* Delete duplicate files automatically or interactively * This file is part of jdupes; see jdupes.c for license information */ #include #include #include #include #include #include "jdupes.h" #include "jody_win_unicode.h" #include "act_deletefiles.h" extern void deletefiles(file_t *files, int prompt, FILE *tty) { unsigned int counter, groups; unsigned int curgroup = 0; file_t *tmpfile; file_t **dupelist; unsigned int *preserve; char *preservestr; char *token; char *tstr; unsigned int number, sum, max, x; size_t i; if (!files) return; groups = get_max_dupes(files, &max, NULL); max++; dupelist = (file_t **) malloc(sizeof(file_t*) * max); preserve = (unsigned int *) malloc(sizeof(int) * max); preservestr = (char *) malloc(INPUT_SIZE); if (!dupelist || !preserve || !preservestr) oom("deletefiles() structures"); for (; files; files = files->next) { if (ISFLAG(files->flags, F_HAS_DUPES)) { curgroup++; counter = 1; dupelist[counter] = files; if (prompt) { printf("[%u] ", counter); fwprint(stdout, files->d_name, 1); } tmpfile = files->duplicates; while (tmpfile) { dupelist[++counter] = tmpfile; if (prompt) { printf("[%u] ", counter); fwprint(stdout, tmpfile->d_name, 1); } tmpfile = tmpfile->duplicates; } if (prompt) printf("\n"); /* preserve only the first file */ if (!prompt) { preserve[1] = 1; for (x = 2; x <= counter; x++) preserve[x] = 0; } else do { /* prompt for files to preserve */ printf("Set %u of %u: keep which 
files? (1 - %u, [a]ll, [n]one)", curgroup, groups, counter); if (ISFLAG(flags, F_SHOWSIZE)) printf(" (%" PRIuMAX " byte%c each)", (uintmax_t)files->size, (files->size != 1) ? 's' : ' '); printf(": "); fflush(stdout); /* treat fgets() failure as if nothing was entered */ if (!fgets(preservestr, INPUT_SIZE, tty)) preservestr[0] = '\n'; i = strlen(preservestr) - 1; /* tail of buffer must be a newline */ while (preservestr[i] != '\n') { tstr = (char *)realloc(preservestr, strlen(preservestr) + 1 + INPUT_SIZE); if (!tstr) oom("deletefiles() prompt string"); preservestr = tstr; if (!fgets(preservestr + i + 1, INPUT_SIZE, tty)) { preservestr[0] = '\n'; /* treat fgets() failure as if nothing was entered */ break; } i = strlen(preservestr) - 1; } for (x = 1; x <= counter; x++) preserve[x] = 0; token = strtok(preservestr, " ,\n"); if (token != NULL && (*token == 'n' || *token == 'N')) goto preserve_none; while (token != NULL) { if (*token == 'a' || *token == 'A') for (x = 0; x <= counter; x++) preserve[x] = 1; number = 0; sscanf(token, "%u", &number); if (number > 0 && number <= counter) preserve[number] = 1; token = strtok(NULL, " ,\n"); } for (sum = 0, x = 1; x <= counter; x++) sum += preserve[x]; } while (sum < 1); /* save at least one file */ preserve_none: printf("\n"); for (x = 1; x <= counter; x++) { if (preserve[x]) { printf(" [+] "); fwprint(stdout, dupelist[x]->d_name, 1); } else { #ifdef UNICODE if (!M2W(dupelist[x]->d_name, wstr)) { printf(" [!] "); fwprint(stdout, dupelist[x]->d_name, 0); printf("-- MultiByteToWideChar failed\n"); continue; } #endif if (file_has_changed(dupelist[x])) { printf(" [!] "); fwprint(stdout, dupelist[x]->d_name, 0); printf("-- file changed since being scanned\n"); #ifdef UNICODE } else if (DeleteFile(wstr) != 0) { #else } else if (remove(dupelist[x]->d_name) == 0) { #endif printf(" [-] "); fwprint(stdout, dupelist[x]->d_name, 1); } else { printf(" [!] 
"); fwprint(stdout, dupelist[x]->d_name, 0); printf("-- unable to delete file\n"); } } } printf("\n"); } } free(dupelist); free(preserve); free(preservestr); return; } jdupes-1.9/act_deletefiles.h000066400000000000000000000005441321117252000161370ustar00rootroot00000000000000/* jdupes action for deleting duplicate files * This file is part of jdupes; see jdupes.c for license information */ #ifndef ACT_DELETEFILES_H #define ACT_DELETEFILES_H #ifdef __cplusplus extern "C" { #endif #include "jdupes.h" extern void deletefiles(file_t *files, int prompt, FILE *tty); #ifdef __cplusplus } #endif #endif /* ACT_DELETEFILES_H */ jdupes-1.9/act_linkfiles.c000066400000000000000000000261441321117252000156310ustar00rootroot00000000000000/* Hard link or symlink files * This file is part of jdupes; see jdupes.c for license information */ #include "jdupes.h" /* Compile out the code if no linking support is built in */ #if !(defined NO_HARDLINKS && defined NO_SYMLINKS) #include #include #include #include #include "act_linkfiles.h" #include "jody_win_unicode.h" #ifdef ON_WINDOWS #include "win_stat.h" #endif extern void linkfiles(file_t *files, const int hard) { static file_t *tmpfile; static file_t *srcfile; static file_t *curfile; static file_t ** restrict dupelist; static unsigned int counter; static unsigned int max = 0; static unsigned int x = 0; static size_t name_len = 0; static int i, success; #ifndef NO_SYMLINKS static unsigned int symsrc; static char rel_path[PATHBUF_SIZE]; #endif static char temp_path[PATHBUF_SIZE]; LOUD(fprintf(stderr, "Running linkfiles(%d)\n", hard);) curfile = files; while (curfile) { if (ISFLAG(curfile->flags, F_HAS_DUPES)) { counter = 1; tmpfile = curfile->duplicates; while (tmpfile) { counter++; tmpfile = tmpfile->duplicates; } if (counter > max) max = counter; } curfile = curfile->next; } max++; dupelist = (file_t**) malloc(sizeof(file_t*) * max); if (!dupelist) oom("linkfiles() dupelist"); while (files) { if (ISFLAG(files->flags, F_HAS_DUPES)) { 
counter = 1; dupelist[counter] = files; tmpfile = files->duplicates; while (tmpfile) { counter++; dupelist[counter] = tmpfile; tmpfile = tmpfile->duplicates; } /* Link every file to the first file */ if (hard) { #ifndef NO_HARDLINKS x = 2; srcfile = dupelist[1]; #else fprintf(stderr, "internal error: linkfiles(hard) called without hard link support\nPlease report this to the author as a program bug\n"); exit(EXIT_FAILURE); #endif } else { #ifndef NO_SYMLINKS x = 1; /* Symlinks should target a normal file if one exists */ srcfile = NULL; for (symsrc = 1; symsrc <= counter; symsrc++) { if (!ISFLAG(dupelist[symsrc]->flags, F_IS_SYMLINK)) { srcfile = dupelist[symsrc]; break; } } /* If no normal file exists, abort */ if (srcfile == NULL) continue; #else fprintf(stderr, "internal error: linkfiles(soft) called without symlink support\nPlease report this to the author as a program bug\n"); exit(EXIT_FAILURE); #endif } if (!ISFLAG(flags, F_HIDEPROGRESS)) { printf("[SRC] "); fwprint(stdout, srcfile->d_name, 1); } for (; x <= counter; x++) { if (hard == 1) { /* Can't hard link files on different devices */ if (srcfile->device != dupelist[x]->device) { fprintf(stderr, "warning: hard link target on different device, not linking:\n-//-> "); fwprint(stderr, dupelist[x]->d_name, 1); continue; } else { /* The devices for the files are the same, but we still need to skip * anything that is already hard linked (-L and -H both set) */ if (srcfile->inode == dupelist[x]->inode) { /* Don't show == arrows when not matching against other hard links */ if (ISFLAG(flags, F_CONSIDERHARDLINKS)) if (!ISFLAG(flags, F_HIDEPROGRESS)) { printf("-==-> "); fwprint(stdout, dupelist[x]->d_name, 1); } continue; } } } else { /* Symlink prerequisite check code can go here */ /* Do not attempt to symlink a file to itself or to another symlink */ #ifndef NO_SYMLINKS if (ISFLAG(dupelist[x]->flags, F_IS_SYMLINK) && ISFLAG(dupelist[symsrc]->flags, F_IS_SYMLINK)) continue; if (x == symsrc) continue; #endif } 
#ifdef UNICODE if (!M2W(dupelist[x]->d_name, wname)) { fprintf(stderr, "error: MultiByteToWideChar failed: "); fwprint(stderr, dupelist[x]->d_name, 1); continue; } #endif /* UNICODE */ /* Do not attempt to hard link files for which we don't have write access */ #ifdef ON_WINDOWS if (dupelist[x]->mode & FILE_ATTRIBUTE_READONLY) #else if (access(dupelist[x]->d_name, W_OK) != 0) #endif { fprintf(stderr, "warning: link target is a read-only file, not linking:\n-//-> "); fwprint(stderr, dupelist[x]->d_name, 1); continue; } /* Check file pairs for modification before linking */ /* Safe linking: don't actually delete until the link succeeds */ i = file_has_changed(srcfile); if (i) { fprintf(stderr, "warning: source file modified since scanned; changing source file:\n[SRC] "); fwprint(stderr, dupelist[x]->d_name, 1); LOUD(fprintf(stderr, "file_has_changed: %d\n", i);) srcfile = dupelist[x]; continue; } if (file_has_changed(dupelist[x])) { fprintf(stderr, "warning: target file modified since scanned, not linking:\n-//-> "); fwprint(stderr, dupelist[x]->d_name, 1); continue; } #ifdef ON_WINDOWS /* For Windows, the hard link count maximum is 1023 (+1); work around * by skipping linking or changing the link source file as needed */ if (win_stat(srcfile->d_name, &ws) != 0) { fprintf(stderr, "warning: win_stat() on source file failed, changing source file:\n[SRC] "); fwprint(stderr, dupelist[x]->d_name, 1); srcfile = dupelist[x]; continue; } if (ws.nlink >= 1024) { fprintf(stderr, "warning: maximum source link count reached, changing source file:\n[SRC] "); srcfile = dupelist[x]; continue; } if (win_stat(dupelist[x]->d_name, &ws) != 0) continue; if (ws.nlink >= 1024) { fprintf(stderr, "warning: maximum destination link count reached, skipping:\n-//-> "); fwprint(stderr, dupelist[x]->d_name, 1); continue; } #endif /* Make sure the name will fit in the buffer before trying */ name_len = strlen(dupelist[x]->d_name) + 14; if (name_len > PATHBUF_SIZE) continue; /* Assemble a 
temporary file name */ strcpy(temp_path, dupelist[x]->d_name); strcat(temp_path, ".__jdupes__.tmp"); /* Rename the source file to the temporary name */ #ifdef UNICODE if (!M2W(temp_path, wname2)) { fprintf(stderr, "error: MultiByteToWideChar failed: "); fwprint(stderr, srcfile->d_name, 1); continue; } i = MoveFile(wname, wname2) ? 0 : 1; #else i = rename(dupelist[x]->d_name, temp_path); #endif if (i != 0) { fprintf(stderr, "warning: cannot move link target to a temporary name, not linking:\n-//-> "); fwprint(stderr, dupelist[x]->d_name, 1); /* Just in case the rename succeeded yet still returned an error, roll back the rename */ #ifdef UNICODE MoveFile(wname2, wname); #else rename(temp_path, dupelist[x]->d_name); #endif continue; } /* Create the desired hard link with the original file's name */ errno = 0; success = 0; #ifdef ON_WINDOWS #ifdef UNICODE if (!M2W(srcfile->d_name, wname2)) { fprintf(stderr, "error: MultiByteToWideChar failed: "); fwprint(stderr, srcfile->d_name, 1); continue; } if (CreateHardLinkW((LPCWSTR)wname, (LPCWSTR)wname2, NULL) == TRUE) success = 1; #else if (CreateHardLink(dupelist[x]->d_name, srcfile->d_name, NULL) == TRUE) success = 1; #endif #else if (hard) { if (link(srcfile->d_name, dupelist[x]->d_name) == 0) success = 1; #ifdef NO_SYMLINKS } #else } else { i = make_relative_link_name(srcfile->d_name, dupelist[x]->d_name, rel_path); LOUD(fprintf(stderr, "symlink GRN: %s to %s = %s\n", srcfile->d_name, dupelist[x]->d_name, rel_path)); if (i < 0) { fprintf(stderr, "warning: make_relative_link_name() failed (%d)\n", i); } else if (i == 1) { fprintf(stderr, "warning: files to be linked have the same canonical path; not linking\n"); } else if (symlink(rel_path, dupelist[x]->d_name) == 0) success = 1; } #endif /* NO_SYMLINKS */ #endif /* ON_WINDOWS */ if (success) { if (!ISFLAG(flags, F_HIDEPROGRESS)) { printf("%s ", hard ? "---->" : "-@@->"); fwprint(stdout, dupelist[x]->d_name, 1); } } else { /* The link failed. 
Warn the user and put the link target back */ if (!ISFLAG(flags, F_HIDEPROGRESS)) { printf("-//-> "); fwprint(stdout, dupelist[x]->d_name, 1); } fprintf(stderr, "warning: unable to link '"); fwprint(stderr, dupelist[x]->d_name, 0); fprintf(stderr, "' -> '"); fwprint(stderr, srcfile->d_name, 0); fprintf(stderr, "': %s\n", strerror(errno)); #ifdef UNICODE if (!M2W(temp_path, wname2)) { fprintf(stderr, "error: MultiByteToWideChar failed: "); fwprint(stderr, temp_path, 1); continue; } i = MoveFile(wname2, wname) ? 0 : 1; #else i = rename(temp_path, dupelist[x]->d_name); #endif if (i != 0) { fprintf(stderr, "error: cannot rename temp file back to original\n"); fprintf(stderr, "original: "); fwprint(stderr, dupelist[x]->d_name, 1); fprintf(stderr, "current: "); fwprint(stderr, temp_path, 1); } continue; } /* Remove temporary file to clean up; if we can't, reverse the linking */ #ifdef UNICODE if (!M2W(temp_path, wname2)) { fprintf(stderr, "error: MultiByteToWideChar failed: "); fwprint(stderr, temp_path, 1); continue; } i = DeleteFile(wname2) ? 0 : 1; #else i = remove(temp_path); #endif if (i != 0) { /* If the temp file can't be deleted, there may be a permissions problem * so reverse the process and warn the user */ fprintf(stderr, "\nwarning: can't delete temp file, reverting: "); fwprint(stderr, temp_path, 1); #ifdef UNICODE i = DeleteFile(wname) ? 0 : 1; #else i = remove(dupelist[x]->d_name); #endif /* This last error really should not happen, but we can't assume it won't */ if (i != 0) fprintf(stderr, "\nwarning: couldn't remove link to restore original file\n"); else { #ifdef UNICODE i = MoveFile(wname2, wname) ? 
0 : 1; #else i = rename(temp_path, dupelist[x]->d_name); #endif if (i != 0) { fprintf(stderr, "\nwarning: couldn't revert the file to its original name\n"); fprintf(stderr, "original: "); fwprint(stderr, dupelist[x]->d_name, 1); fprintf(stderr, "current: "); fwprint(stderr, temp_path, 1); } } } } if (!ISFLAG(flags, F_HIDEPROGRESS)) printf("\n"); } files = files->next; } free(dupelist); return; } #endif /* NO_HARDLINKS */ jdupes-1.9/act_linkfiles.h000066400000000000000000000005271321117252000156330ustar00rootroot00000000000000/* jdupes action for hard and soft file linking * This file is part of jdupes; see jdupes.c for license information */ #ifndef ACT_LINKFILES_H #define ACT_LINKFILES_H #ifdef __cplusplus extern "C" { #endif #include "jdupes.h" extern void linkfiles(file_t *files, const int hard); #ifdef __cplusplus } #endif #endif /* ACT_LINKFILES_H */ jdupes-1.9/act_printmatches.c000066400000000000000000000020371321117252000163450ustar00rootroot00000000000000/* Print matched file sets * This file is part of jdupes; see jdupes.c for license information */ #include #include #include #include "jdupes.h" #include "jody_win_unicode.h" #include "act_printmatches.h" extern void printmatches(file_t * restrict files) { file_t * restrict tmpfile; int printed = 0; LOUD(fprintf(stderr, "act_printmatches: %p\n", files)); while (files != NULL) { if (ISFLAG(files->flags, F_HAS_DUPES)) { printed = 1; if (!ISFLAG(flags, F_OMITFIRST)) { if (ISFLAG(flags, F_SHOWSIZE)) printf("%" PRIdMAX " byte%c each:\n", (intmax_t)files->size, (files->size != 1) ? 
's' : ' '); fwprint(stdout, files->d_name, 1); } tmpfile = files->duplicates; while (tmpfile != NULL) { fwprint(stdout, tmpfile->d_name, 1); tmpfile = tmpfile->duplicates; } if (files->next != NULL) fwprint(stdout, "", 1); } files = files->next; } if (printed == 0) fwprint(stderr, "No duplicates found.", 1); return; } jdupes-1.9/act_printmatches.h000066400000000000000000000005471321117252000163560ustar00rootroot00000000000000/* jdupes action for printing matched file sets to stdout * This file is part of jdupes; see jdupes.c for license information */ #ifndef ACT_PRINTMATCHES_H #define ACT_PRINTMATCHES_H #ifdef __cplusplus extern "C" { #endif #include "jdupes.h" extern void printmatches(file_t * restrict files); #ifdef __cplusplus } #endif #endif /* ACT_PRINTMATCHES_H */ jdupes-1.9/act_summarize.c000066400000000000000000000021231321117252000156540ustar00rootroot00000000000000/* Print summary of match statistics to stdout * This file is part of jdupes; see jdupes.c for license information */ #include #include #include #include "jdupes.h" #include "act_summarize.h" extern void summarizematches(const file_t * restrict files) { unsigned int numsets = 0; off_t numbytes = 0; int numfiles = 0; while (files != NULL) { file_t *tmpfile; if (ISFLAG(files->flags, F_HAS_DUPES)) { numsets++; tmpfile = files->duplicates; while (tmpfile != NULL) { numfiles++; numbytes += files->size; tmpfile = tmpfile->duplicates; } } files = files->next; } if (numsets == 0) printf("No duplicates found.\n"); else { printf("%d duplicate files (in %d sets), occupying ", numfiles, numsets); if (numbytes < 1000) printf("%" PRIdMAX " byte%c\n", (intmax_t)numbytes, (numbytes != 1) ? 
's' : ' '); else if (numbytes <= 1000000) printf("%" PRIdMAX " KB\n", (intmax_t)(numbytes / 1000)); else printf("%" PRIdMAX " MB\n", (intmax_t)(numbytes / 1000000)); } return; } jdupes-1.9/act_summarize.h000066400000000000000000000005571321117252000156720ustar00rootroot00000000000000/* jdupes action for printing a summary of match stats to stdout * This file is part of jdupes; see jdupes.c for license information */ #ifndef ACT_SUMMARIZE_H #define ACT_SUMMARIZE_H #ifdef __cplusplus extern "C" { #endif #include "jdupes.h" extern void summarizematches(const file_t * restrict files); #ifdef __cplusplus } #endif #endif /* ACT_SUMMARIZE_H */ jdupes-1.9/chroot_build.sh000077500000000000000000000042201321117252000156610ustar00rootroot00000000000000#!/bin/sh # Jody's generic chroot build script # Version 1.0 ARCHES="i386 x86-64 uclibc-i386 uclibc-x86-64" test -z "$NAME" && NAME="$(basename "$(pwd)")" test -e "version.h" && VER="$(grep '#define VER ' version.h | tr -d \\\" | cut -d' ' -f3)" test -z "$VER" && VER=0 export NAME export VER export CHROOT_BASE=/chroots export WD="$(pwd)" export PKG="pkg" echo "chroot builder: building '$NAME' version '$VER'" trap clean_exit INT QUIT ABRT HUP clean_exit () { umount $CHROOT/proc $CHROOT/sys $CHROOT/tmp $CHROOT/dev $CHROOT/usr/src $CHROOT/home } do_build () { test -z "$WD" && echo "WD not set, aborting" && exit 1 test -z "$PKG" && echo "PKG not set, aborting" && exit 1 make clean if ! make -j$JOBS all then echo "Build failed"; exit 1 else echo "WD/PKG: $WD/$PKG" test -d $WD/$PKG && rm -rf $WD/$PKG mkdir $WD/$PKG make DESTDIR=$WD/$PKG install && \ tar -C pkg -c usr | xz -e > ${NAME}_$VER-$ARCH.pkg.tar.xz echo "Built ${NAME}_$VER-$ARCH.pkg.tar.xz" fi } if [ "$(id -u)" != "0" ] then echo "You must be root to auto-build chroot packages." exit 1 fi if [ "$DO_CHROOT_BUILD" = "1" ] then test -z "$1" && echo "No arch specified" && exit 1 test ! 
-d "$1" && echo "Not a directory: $1" && exit 1 cd $1 export WD="$1" do_build echo "finished: $1" exit else echo baz export DO_CHROOT_BUILD=1 for ARCH in $ARCHES do export ARCH export CHROOT="$CHROOT_BASE/$ARCH" test ! -d $CHROOT && echo "$CHROOT not present, not building $ARCH package." && continue echo "Performing package build for $CHROOT" test ! -x $CHROOT/bin/sh && echo "$CHROOT does not seem to be a chroot; aborting." && exit 1 mount --bind /dev $CHROOT/dev || clean_exit mount --bind /usr/src $CHROOT/usr/src || clean_exit mount --bind /home $CHROOT/home || clean_exit mount -t proc proc $CHROOT/proc || clean_exit mount -t sysfs sysfs $CHROOT/sys || clean_exit mount -t tmpfs tmpfs $CHROOT/tmp || clean_exit if echo "$ARCH" | grep -q "i386" then linux32 chroot $CHROOT $WD/$0 $WD else chroot $CHROOT $WD/$0 $WD fi umount $CHROOT/proc $CHROOT/sys $CHROOT/tmp $CHROOT/dev $CHROOT/usr/src $CHROOT/home test -d $WD/$PKG && rm -rf $WD/$PKG done fi jdupes-1.9/compare_jdupes.sh000077500000000000000000000017701321117252000162130ustar00rootroot00000000000000#!/bin/sh # Runs the installed *dupes* binary and the built binary and compares # the output for sameness. Also displays timing statistics. ERR=0 # Detect installed program type (fdupes or jdupes) ORIG_DUPE=false jdupes -v 2>/dev/null >/dev/null && ORIG_DUPE=jdupes fdupes-jody -v 2>/dev/null >/dev/null && ORIG_DUPE=fdupes-jody fdupes -v 2>/dev/null >/dev/null && ORIG_DUPE=fdupes test ! -z "$WINDIR" && "$WINDIR/jdupes.exe" -v 2>/dev/null >/dev/null && ORIG_DUPE="$WINDIR/jdupes.exe" if [ ! $ORIG_DUPE -v 2>/dev/null >/dev/null ] then echo "Cannot run installed jdupes, fdupes-jody, or fdupes" exit 1 fi test ! 
-e ./jdupes && echo "Build jdupes first, silly" && exit 1 echo -n "Installed $ORIG_DUPE:" sync time $ORIG_DUPE -nrq "$@" > installed_output.txt || ERR=1 echo -en "\nBuilt jdupes:" sync time ./jdupes -nrq "$@" > built_output.txt || ERR=1 diff -Nau installed_output.txt built_output.txt rm -f installed_output.txt built_output.txt test "$ERR" != "0" && echo "Errors were returned during execution" jdupes-1.9/fdupes_oneline.sh000077500000000000000000000003221321117252000162020ustar00rootroot00000000000000#!/bin/sh # Emulates fdupes -1 output # Usage: jdupes command line | ./fdupes_oneline.sh while read LINE do if [ -z "$LINE" ] then echo else echo -n "$LINE" | sed 's/ /\\ /g'; echo -n " " fi done jdupes-1.9/jdupes.1000066400000000000000000000174501321117252000142320ustar00rootroot00000000000000.TH FDUPES 1 .\" NAME should be all caps, SECTION should be 1-8, maybe w/ subsection .\" other parms are allowed: see man(7), man(1) .SH NAME jdupes \- finds and performs actions upon duplicate files .SH SYNOPSIS .B jdupes [ .I options ] .I DIRECTORY \|.\|.\|. .SH "DESCRIPTION" Searches the given path(s) for duplicate files. Such files are found by comparing file sizes, then partial and full file hashes, followed by a byte-by-byte comparison. .SH OPTIONS .TP .B -@ --loud output annoying low-level debug info while running .TP .B -1 --one-file-system do not match files that are on different filesystems or devices .TP .B -A --nohidden exclude hidden files from consideration .TP .B -B --dedupe issue the btrfs same-extents ioctl to trigger a deduplication on disk. 
The program must be built with btrfs support for this option to be available .TP .B -D --debug if this feature is compiled in, show debugging statistics and info at the end of program execution .TP .B -d --delete prompt user for files to preserve, deleting all others (see .B CAVEATS below) .TP .B -f --omitfirst omit the first file in each set of matches .TP .B -H --hardlinks normally, when two or more files point to the same disk area they are treated as non-duplicates; this option will change this behavior .TP .B -h --help displays help .TP .B -i --reverse reverse (invert) the sort order of matches .TP .B -I --isolate isolate each command-line parameter from one another; only match if the files are under different parameter specifications .TP .B -L --linkhard replace all duplicate files with hardlinks to the first file in each set of duplicates .TP .B -m --summarize summarize duplicate files information .TP .B -N --noprompt when used together with \-\-delete, preserve the first file in each set of duplicates and delete the others without prompting the user .TP .B -n --noempty exclude zero-length files from consideration; this option is the default behavior and does nothing (also see \fB\-z/--zeromatch\fP) .TP .B -O --paramorder parameter order preservation is more important than the chosen sort; this is particularly useful with the \fB\-N\fP option to ensure that automatic deletion behaves in a controllable way .TP .B -o --order\fR=\fIWORD\fR order files according to WORD: time - sort by modification time name - sort by filename (default) .TP .B -p --permissions don't consider files with different owner/group or permission bits as duplicates .TP .B -Q --quick .B [WARNING: RISK OF DATA LOSS, SEE CAVEATS] skip byte-for-byte verification of duplicate pairs (use hashes only) .TP .B -q --quiet hide progress indicator .TP .B -R --recurse: for each directory given after this option follow subdirectories encountered within (note the ':' at the end of option; see the 
Examples section below for further explanation) .TP .B -r --recurse for every directory given follow subdirectories encountered within .TP .B -l --linksoft replace all duplicate files with symlinks to the first file in each set of duplicates .TP .B -S --size show size of duplicate files .TP .B -s --symlinks follow symlinked directories .TP .B -v --version display jdupes version and compilation feature flags .TP .B -x --xsize=[+]SIZE (NOTE: deprecated in favor of \-X) exclude files of size less than SIZE from consideration, or if SIZE is prefixed with a '+' i.e. jdupes -x +226 [files] then exclude files larger than SIZE. Suffixes K/M/G can be used. .TP .B -X --exclude=spec:info exclude files based on specified criteria; supported specs are: .RS .IP `size[+-=]:number[suffix]' Match only if size is greater (+), less than (-), or equal to (=) the specified number, with an optional multiplier suffix. The +/- and = specifiers can be combined; ex :"size+=4K" will match if size is greater than or equal to four kilobytes (4096 bytes). Suffixes supported are K/M/G/T/P/E with a B or iB extension (all case-insensitive); no extension or an IB extension specify binary multipliers while a B extension specifies decimal multipliers (ex: 4K or 4KiB = 4096, 4KB = 4000.) .RE .TP .B -z --zeromatch consider zero-length files to be duplicates; this replaces the old default behavior when \fB\-n\fP was not specified .TP .B -Z --softabort if the user aborts the program (as with CTRL-C) act on the matches that were found before the abort was received. For example, if -L and -Z are specified, all matches found prior to the abort will be hard linked. The default behavior without -Z is to abort without taking any actions. .SH NOTES A set of arrows are used in hard linking to show what action was taken on each link candidate. 
These arrows are as follows: .TP .B ----> This file was successfully hard linked to the first file in the duplicate chain .TP .B -@@-> This file was successfully symlinked to the first file in the chain .TP .B -==-> This file was already a hard link to the first file in the chain .TP .B -//-> Linking this file failed due to an error during the linking process .PP Duplicate files are listed together in groups with each file displayed on a separate line. The groups are then separated from each other by blank lines. .SH EXAMPLES .TP .B jdupes a --recurse: b will follow subdirectories under b, but not those under a. .TP .B jdupes a --recurse b will follow subdirectories under both a and b. .TP .B jdupes -O dir1 dir3 dir2 will always place 'dir1' results first in any match set (where relevant) .SH CAVEATS Using .B \-1 or .BR \-\-one\-file\-system prevents matches that cross filesystems, but a more relaxed form of this option may be added that allows cross-matching for all filesystems that each parameter is present on. When using .B \-d or .BR \-\-delete , care should be taken to insure against accidental data loss. .B \-Z or .BR \-\-softabort used to be --hardabort in jdupes prior to v1.5 and had the opposite behavior. Defaulting to taking action on abort is probably not what most users would expect. The decision to invert rather than reassign to a different switch was made because this feature was still fairly new at the time of the change. The .B \-O or .BR \-\-paramorder option allows the user greater control over what appears in the first position of a match set, specifically for keeping the \fB\-N\fP option from deleting all but one file in a set in a seemingly random way. All directories specified on the command line will be used as the sorting order of result sets first, followed by the sorting algorithm set by the \fB\-o\fP or \fB\-\-order\fP option. 
This means that the order of all match pairs for a single directory specification will retain the old sorting behavior even if this option is specified. When used together with options .B \-s or .BR \-\-symlink , a user could accidentally preserve a symlink while deleting the file it points to. Furthermore, when specifying a particular directory more than once, all files within that directory will be listed as their own duplicates, leading to data loss should a user preserve a file without its "duplicate" (the file itself!). The .B \-Q or .BR \-\-quick option only reads each file once, hashes it, and performs comparisons based solely on the hashes. There is a small but significant risk of a hash collision which is the purpose of the failsafe byte-for-byte comparison that this option explicitly bypasses. Do not use it on ANY data set for which any amount of data loss is unacceptable. This option is not included in the help text for the program due to its risky nature. .B You have been warned! .SH REPORTING BUGS Send all bug reports to jody@jodybruchon.com or use the Issue tracker at http://github.com/jbruchon/jdupes/issues .SH AUTHOR jdupes is a fork of 'fdupes' which is maintained by and contains extra code copyrighted by Jody Bruchon jdupes was once called 'fdupes-jody' but the name was changed at the request of Adrian Lopez to avoid confusion between the two programs. 
Based on 'fdupes' created by Adrian Lopez jdupes-1.9/jdupes.c000066400000000000000000002047621321117252000143200ustar00rootroot00000000000000/* jdupes (C) 2015-2017 Jody Bruchon Derived from fdupes (C) 1999-2017 Adrian Lopez Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ #include #include #include #include #include #include #include #include #include #include #include #include #ifndef OMIT_GETOPT_LONG #include #endif #include #include #include #include #include "jdupes.h" #include "string_malloc.h" #include "jody_hash.h" #include "jody_sort.h" #include "jody_win_unicode.h" #include "jody_cacheinfo.h" #include "version.h" /* Headers for post-scanning actions */ #include "act_deletefiles.h" #include "act_dedupefiles.h" #include "act_linkfiles.h" #include "act_printmatches.h" #include "act_summarize.h" /* Detect Windows and modify as needed */ #if defined _WIN32 || defined __CYGWIN__ const char dir_sep = '\\'; #ifdef UNICODE const wchar_t *FILE_MODE_RO = L"rbS"; #else const char *FILE_MODE_RO = "rbS"; #endif /* UNICODE */ #else /* Not Windows */ const char *FILE_MODE_RO = "rb"; const char dir_sep = '/'; #ifdef UNICODE #error Do not define UNICODE on non-Windows platforms. #undef UNICODE #endif #endif /* _WIN32 || __CYGWIN__ */ /* Windows + Unicode compilation */ #ifdef UNICODE wchar_t wname[PATH_MAX]; wchar_t wname2[PATH_MAX]; wchar_t wstr[PATH_MAX]; int out_mode = _O_TEXT; int err_mode = _O_TEXT; #define M2W(a,b) MultiByteToWideChar(CP_UTF8, 0, a, -1, (LPWSTR)b, PATH_MAX) #define W2M(a,b) WideCharToMultiByte(CP_UTF8, 0, a, -1, (LPSTR)b, PATH_MAX, NULL, NULL) #endif /* UNICODE */ #ifndef NO_SYMLINKS #include "jody_paths.h" #endif /* Behavior modification flags */ uint_fast32_t flags = 0; static const char *program_name; /* This gets used in many functions */ #ifdef ON_WINDOWS struct winstat ws; #else struct stat s; #endif /* Larger chunk size makes large files process faster but uses more RAM */ #ifndef CHUNK_SIZE #define CHUNK_SIZE 32768 #endif #ifndef PARTIAL_HASH_SIZE #define PARTIAL_HASH_SIZE 4096 #endif static size_t auto_chunk_size; /* Maximum path buffer size to use; must be large enough for a path plus * any work that might be done to the array it's stored in. PATH_MAX is * not always true. 
Read this article on the false promises of PATH_MAX: * http://insanecoding.blogspot.com/2007/11/pathmax-simply-isnt.html * Windows + Unicode needs a lot more space than UTF-8 in Linux/Mac OS X */ #ifndef PATHBUF_SIZE #define PATHBUF_SIZE 4096 #endif /* Refuse to build if PATHBUF_SIZE is too small */ #if PATHBUF_SIZE < PATH_MAX #error "PATHBUF_SIZE can't be less than PATH_MAX" #endif #ifndef INITIAL_DEPTH_THRESHOLD #define INITIAL_DEPTH_THRESHOLD 8 #endif /* For interactive deletion input */ #define INPUT_SIZE 512 /* Size suffixes - this gets exported */ const struct size_suffix size_suffix[] = { /* Byte (someone may actually try to use this) */ { "b", 1 }, { "k", 1024 }, { "kib", 1024 }, { "m", 1048576 }, { "mib", 1048576 }, { "g", (uint64_t)1048576 * 1024 }, { "gib", (uint64_t)1048576 * 1024 }, { "t", (uint64_t)1048576 * 1048576 }, { "tib", (uint64_t)1048576 * 1048576 }, { "p", (uint64_t)1048576 * 1048576 * 1024}, { "pib", (uint64_t)1048576 * 1048576 * 1024}, { "e", (uint64_t)1048576 * 1048576 * 1048576}, { "eib", (uint64_t)1048576 * 1048576 * 1048576}, /* Decimal suffixes */ { "kb", 1000 }, { "mb", 1000000 }, { "gb", 1000000000 }, { "tb", 1000000000000 }, { "pb", 1000000000000000 }, { "eb", 1000000000000000000 }, { NULL, 0 }, }; /* Assemble extension string from compile-time options */ static const char *extensions[] = { #ifdef ON_WINDOWS "windows", #endif #ifdef UNICODE "unicode", #endif #ifdef OMIT_GETOPT_LONG "nolong", #endif #ifdef __FAST_MATH__ "fastmath", #endif #ifdef DEBUG "debug", #endif #ifdef LOUD_DEBUG "loud", #endif #ifdef ENABLE_BTRFS "btrfs", #endif #ifdef LOW_MEMORY "lowmem", #endif #ifdef SMA_PAGE_SIZE "smapage", #endif #if JODY_HASH_WIDTH == 32 "hash32", #endif #if JODY_HASH_WIDTH == 16 "hash16", #endif #ifdef NO_PERMS "noperm", #endif #ifdef NO_SYMLINKS "nosymlink", #endif #ifdef USE_TREE_REBALANCE "rebal", #endif #ifdef CONSIDER_IMBALANCE "ci", #endif #ifdef BALANCE_THRESHOLD "bt", #endif NULL }; /* Tree to track each directory traversed */ 
struct travdone { struct travdone *left; struct travdone *right; jdupes_ino_t inode; dev_t device; }; static struct travdone *travdone_head = NULL; /* Exclusion tree head and static tag list */ struct exclude *exclude_head = NULL; const struct exclude_tags exclude_tags[] = { { "dir", X_DIR }, { "size+", X_SIZE_GT }, { "size+=", X_SIZE_GTEQ }, { "size-=", X_SIZE_LTEQ }, { "size-", X_SIZE_LT }, { "size=", X_SIZE_EQ }, { NULL, 0 }, }; /* Required for progress indicator code */ static uintmax_t filecount = 0; static uintmax_t progress = 0, item_progress = 0, dupecount = 0; /* Number of read loops before checking progress indicator */ #define CHECK_MINIMUM 256 /* Hash/compare performance statistics (debug mode) */ #ifdef DEBUG static unsigned int small_file = 0, partial_hash = 0, partial_elim = 0; static unsigned int full_hash = 0, partial_to_full = 0, hash_fail = 0; static uintmax_t comparisons = 0; static unsigned int left_branch = 0, right_branch = 0; #ifdef ON_WINDOWS #ifndef NO_HARDLINKS static unsigned int hll_exclude = 0; #endif #endif #endif /* DEBUG */ #ifdef TREE_DEPTH_STATS static unsigned int tree_depth = 0; static unsigned int max_depth = 0; #endif /* File tree head */ static filetree_t *checktree = NULL; /* Directory/file parameter position counter */ static unsigned int user_item_count = 1; /* registerfile() direction options */ enum tree_direction { NONE, LEFT, RIGHT }; /* Sort order reversal */ static int sort_direction = 1; /* Signal handler */ static int interrupt = 0; /* Progress indicator time */ struct timeval time1, time2; /***** End definitions, begin code *****/ /* Catch CTRL-C and either notify or terminate */ void sighandler(const int signum) { (void)signum; if (interrupt || !ISFLAG(flags, F_SOFTABORT)) { fprintf(stderr, "\n"); string_malloc_destroy(); exit(EXIT_FAILURE); } interrupt = 1; return; } /* Out of memory */ extern void oom(const char * const restrict msg) { fprintf(stderr, "\nout of memory: %s\n", msg); string_malloc_destroy(); 
exit(EXIT_FAILURE); } /* Null pointer failure */ extern void nullptr(const char * restrict func) { static const char n[] = "(NULL)"; if (func == NULL) func = n; fprintf(stderr, "\ninternal error: NULL pointer passed to %s\n", func); string_malloc_destroy(); exit(EXIT_FAILURE); } /* Compare two jody_hashes like memcmp() */ #define HASH_COMPARE(a,b) ((a > b) ? 1:((a == b) ? 0:-1)) static inline char **cloneargs(const int argc, char **argv) { static int x; static char **args; args = (char **)string_malloc(sizeof(char *) * (unsigned int)argc); if (args == NULL) oom("cloneargs() start"); for (x = 0; x < argc; x++) { args[x] = (char *)string_malloc(strlen(argv[x]) + 1); if (args[x] == NULL) oom("cloneargs() loop"); strcpy(args[x], argv[x]); } return args; } static int findarg(const char * const arg, const int start, const int argc, char **argv) { int x; for (x = start; x < argc; x++) if (strcmp(argv[x], arg) == 0) return x; return x; } /* Find the first non-option argument after specified option. 
*/ static int nonoptafter(const char *option, const int argc, char **oldargv, char **newargv) { int x; int targetind; int testind; int startat = 1; targetind = findarg(option, 1, argc, oldargv); for (x = optind; x < argc; x++) { testind = findarg(newargv[x], startat, argc, oldargv); if (testind > targetind) return x; else startat = testind; } return x; } /* Update progress indicator if requested */ static void update_progress(const char * const restrict msg, const int file_percent) { static int did_fpct = 0; /* The caller should be doing this anyway...but don't trust that they did */ if (ISFLAG(flags, F_HIDEPROGRESS)) return; gettimeofday(&time2, NULL); if (progress == 0 || time2.tv_sec > time1.tv_sec) { fprintf(stderr, "\rProgress [%" PRIuMAX "/%" PRIuMAX ", %" PRIuMAX " pairs matched] %" PRIuMAX "%%", progress, filecount, dupecount, (progress * 100) / filecount); if (file_percent > -1 && msg != NULL) { fprintf(stderr, " (%s: %d%%) ", msg, file_percent); did_fpct = 1; } else if (did_fpct != 0) { fprintf(stderr, " "); did_fpct = 0; } fflush(stderr); } time1.tv_sec = time2.tv_sec; return; } /* Check file's stat() info to make sure nothing has changed * Returns 1 if changed, 0 if not changed, negative if error */ extern int file_has_changed(file_t * const restrict file) { if (file == NULL || file->d_name == NULL) nullptr("file_has_changed()"); LOUD(fprintf(stderr, "file_has_changed('%s')\n", file->d_name);) if (!ISFLAG(file->flags, F_VALID_STAT)) return -66; #ifdef ON_WINDOWS int i; if ((i = win_stat(file->d_name, &ws)) != 0) return i; if (file->inode != ws.inode) return 1; if (file->size != ws.size) return 1; if (file->device != ws.device) return 1; if (file->mtime != ws.mtime) return 1; if (file->mode != ws.mode) return 1; #else if (stat(file->d_name, &s) != 0) return -2; if (file->inode != s.st_ino) return 1; if (file->size != s.st_size) return 1; if (file->device != s.st_dev) return 1; if (file->mtime != s.st_mtime) return 1; if (file->mode != s.st_mode) return 
1; #ifndef NO_PERMS if (file->uid != s.st_uid) return 1; if (file->gid != s.st_gid) return 1; #endif #ifndef NO_SYMLINKS if (lstat(file->d_name, &s) != 0) return -3; if ((S_ISLNK(s.st_mode) > 0) ^ ISFLAG(file->flags, F_IS_SYMLINK)) return 1; #endif #endif /* ON_WINDOWS */ return 0; } extern inline int getfilestats(file_t * const restrict file) { if (file == NULL || file->d_name == NULL) nullptr("getfilestats()"); LOUD(fprintf(stderr, "getfilestats('%s')\n", file->d_name);) /* Don't stat the same file more than once */ if (ISFLAG(file->flags, F_VALID_STAT)) return 0; SETFLAG(file->flags, F_VALID_STAT); #ifdef ON_WINDOWS if (win_stat(file->d_name, &ws) != 0) return -1; file->inode = ws.inode; file->size = ws.size; file->device = ws.device; file->mtime = ws.mtime; file->mode = ws.mode; #ifndef NO_HARDLINKS file->nlink = ws.nlink; #endif #else if (stat(file->d_name, &s) != 0) return -1; file->inode = s.st_ino; file->size = s.st_size; file->device = s.st_dev; file->mtime = s.st_mtime; file->mode = s.st_mode; #ifndef NO_HARDLINKS file->nlink = s.st_nlink; #endif #ifndef NO_PERMS file->uid = s.st_uid; file->gid = s.st_gid; #endif #ifndef NO_SYMLINKS if (lstat(file->d_name, &s) != 0) return -1; if (S_ISLNK(s.st_mode) > 0) SETFLAG(file->flags, F_IS_SYMLINK); #endif #endif /* ON_WINDOWS */ return 0; } static void add_exclude(const char *option) { char *opt, *p; struct exclude *excl = exclude_head; const struct exclude_tags *tags = exclude_tags; const struct size_suffix *ss = size_suffix; if (option == NULL) nullptr("add_exclude()"); LOUD(fprintf(stderr, "add_exclude '%s'\n", option);) opt = string_malloc(strlen(option) + 1); if (opt == NULL) oom("add_exclude option"); strcpy(opt, option); p = opt; while (*p != ':' && *p != '\0') p++; /* Split tag string into *opt (tag) and *p (value) */ if (*p == ':') { *p = '\0'; p++; } while (tags->tag != NULL && strcmp(tags->tag, opt) != 0) tags++; if (tags->tag == NULL) goto bad_tag; /* Check for a tag that requires a value */ if 
(tags->flags & XX_EXCL_DATA && *p == '\0') goto spec_missing; /* *p is now at the value, NOT the tag string! */ if (exclude_head != NULL) { /* Add to end of exclusion stack if head is present */ while (excl->next != NULL) excl = excl->next; excl->next = string_malloc(sizeof(struct exclude) + strlen(p)); if (excl->next == NULL) oom("add_exclude alloc"); excl = excl->next; } else { /* Allocate exclude_head if no exclusions exist yet */ exclude_head = string_malloc(sizeof(struct exclude) + strlen(p)); if (exclude_head == NULL) oom("add_exclude alloc"); excl = exclude_head; } /* Set tag value from predefined tag array */ excl->flags = tags->flags; /* Initialize the new exclude element */ excl->next = NULL; if (excl->flags & XX_EXCL_OFFSET) { /* Exclude uses a number; handle it with possible suffixes */ *(excl->param) = '\0'; /* Get base size */ if (*p < '0' || *p > '9') goto bad_size_suffix; excl->size = strtoll(p, &p, 10); /* Handle suffix, if any */ if (*p != '\0') { while (ss->suffix != NULL && strcasecmp(ss->suffix, p) != 0) ss++; if (ss->suffix == NULL) goto bad_size_suffix; excl->size *= ss->multiplier; } } else { /* Exclude uses string data; just copy it */ excl->size = 0; strcpy(excl->param, p); } LOUD(fprintf(stderr, "Added exclude: tag '%s', data '%s', size %lld, flags %d\n", opt, excl->param, (long long)excl->size, excl->flags);) string_free(opt); return; spec_missing: fprintf(stderr, "Exclude spec missing or invalid: -X spec:data\n"); exit(EXIT_FAILURE); bad_tag: fprintf(stderr, "Invalid exclusion tag was specified\n"); exit(EXIT_FAILURE); bad_size_suffix: fprintf(stderr, "Invalid -X size suffix specified; use B or KMGTPE[i][B]\n"); exit(EXIT_FAILURE); } extern int getdirstats(const char * const restrict name, jdupes_ino_t * const restrict inode, dev_t * const restrict dev) { if (name == NULL || inode == NULL || dev == NULL) nullptr("getdirstats"); LOUD(fprintf(stderr, "getdirstats('%s', %p, %p)\n", name, (void *)inode, (void *)dev);) #ifdef ON_WINDOWS if 
(win_stat(name, &ws) != 0) return -1; *inode = ws.inode; *dev = ws.device; if (!S_ISDIR(ws.mode)) return 1; #else if (stat(name, &s) != 0) return -1; *inode = s.st_ino; *dev = s.st_dev; if (!S_ISDIR(s.st_mode)) return 1; #endif /* ON_WINDOWS */ return 0; } /* Check a pair of files for match exclusion conditions * Returns: * 0 if all condition checks pass * -1 or 1 on compare result less/more * -2 on an absolute exclusion condition met * 2 on an absolute match condition met */ extern int check_conditions(const file_t * const restrict file1, const file_t * const restrict file2) { if (file1 == NULL || file2 == NULL || file1->d_name == NULL || file2->d_name == NULL) nullptr("check_conditions()"); LOUD(fprintf(stderr, "check_conditions('%s', '%s')\n", file1->d_name, file2->d_name);) /* Exclude based on -I/--isolate */ if (ISFLAG(flags, F_ISOLATE) && (file1->user_order == file2->user_order)) { LOUD(fprintf(stderr, "check_conditions: files ignored: parameter isolation\n")); return -1; } /* Exclude based on -1/--one-file-system */ if (ISFLAG(flags, F_ONEFS) && (file1->device != file2->device)) { LOUD(fprintf(stderr, "check_conditions: files ignored: not on same filesystem\n")); return -1; } /* Exclude files by permissions if requested */ if (ISFLAG(flags, F_PERMISSIONS) && (file1->mode != file2->mode #ifndef NO_PERMS || file1->uid != file2->uid || file1->gid != file2->gid #endif )) { return -1; LOUD(fprintf(stderr, "check_conditions: no match: permissions/ownership differ (-p on)\n")); } /* Hard link and symlink + '-s' check */ #ifndef NO_HARDLINKS if ((file1->inode == file2->inode) && (file1->device == file2->device)) { if (ISFLAG(flags, F_CONSIDERHARDLINKS)) { LOUD(fprintf(stderr, "check_conditions: files match: hard/soft linked (-H on)\n")); return 2; } else { LOUD(fprintf(stderr, "check_conditions: files ignored: hard/soft linked (-H off)\n")); return -2; } } #endif /* Exclude files that are not the same size */ if (file1->size > file2->size) { LOUD(fprintf(stderr, 
"check_conditions: no match: size of file1 > file2 (%" PRIdMAX " > %" PRIdMAX ")\n", (intmax_t)file1->size, (intmax_t)file2->size)); return -1; } if (file1->size < file2->size) { LOUD(fprintf(stderr, "check_conditions: no match: size of file1 < file2 (%" PRIdMAX " < %"PRIdMAX ")\n", (intmax_t)file1->size, (intmax_t)file2->size)); return 1; } /* Fall through: all checks passed */ LOUD(fprintf(stderr, "check_conditions: all condition checks passed\n")); return 0; } /* Check for exclusion conditions for a single file (1 = fail) */ static int check_singlefile(file_t * const restrict newfile) { static char tempname[PATHBUF_SIZE * 2]; static char *tp = tempname; int excluded; if (newfile == NULL) nullptr("check_singlefile()"); LOUD(fprintf(stderr, "check_singlefile: checking '%s'\n", newfile->d_name)); /* Exclude hidden files if requested */ if (ISFLAG(flags, F_EXCLUDEHIDDEN)) { strcpy(tp, newfile->d_name); tp = basename(tp); if (tp[0] == '.' && strcmp(tp, ".") && strcmp(tp, "..")) { LOUD(fprintf(stderr, "check_singlefile: excluding hidden file (-A on)\n")); return 1; } } /* Get file information and check for validity */ const int i = getfilestats(newfile); if (i || newfile->size == -1) { LOUD(fprintf(stderr, "check_singlefile: excluding due to bad stat()\n")); return 1; } if (!S_ISDIR(newfile->mode)) { /* Exclude zero-length files if requested */ if (newfile->size == 0 && !ISFLAG(flags, F_INCLUDEEMPTY)) { LOUD(fprintf(stderr, "check_singlefile: excluding zero-length empty file (-z not set)\n")); return 1; } /* Exclude files based on exclusion stack size specs */ excluded = 0; for (struct exclude *excl = exclude_head; excl != NULL; excl = excl->next) { uint32_t sflag = excl->flags & XX_EXCL_SIZE; if ( ((sflag == X_SIZE_EQ) && (newfile->size != excl->size)) || ((sflag == X_SIZE_LTEQ) && (newfile->size <= excl->size)) || ((sflag == X_SIZE_GTEQ) && (newfile->size >= excl->size)) || ((sflag == X_SIZE_GT) && (newfile->size > excl->size)) || ((sflag == X_SIZE_LT) && 
(newfile->size < excl->size)) ) excluded = 1; } if (excluded) { LOUD(fprintf(stderr, "check_singlefile: excluding based on xsize limit (-x set)\n")); return 1; } } #ifdef ON_WINDOWS /* Windows has a 1023 (+1) hard link limit. If we're hard linking, * ignore all files that have hit this limit */ #ifndef NO_HARDLINKS if (ISFLAG(flags, F_HARDLINKFILES) && newfile->nlink >= 1024) { #ifdef DEBUG hll_exclude++; #endif LOUD(fprintf(stderr, "check_singlefile: excluding due to Windows 1024 hard link limit\n")); return 1; } #endif /* NO_HARDLINKS */ #endif /* ON_WINDOWS */ return 0; } static file_t *init_newfile(const size_t len, file_t * restrict * const restrict filelistp) { file_t * const restrict newfile = (file_t *)string_malloc(sizeof(file_t)); if (!newfile) oom("init_newfile() file structure"); memset(newfile, 0, sizeof(file_t)); newfile->d_name = (char *)string_malloc(len); if (!newfile->d_name) oom("init_newfile() filename"); newfile->next = *filelistp; newfile->user_order = user_item_count; newfile->size = -1; newfile->duplicates = NULL; return newfile; } /* Create a new traversal check object and initialize its values */ static struct travdone *travdone_alloc(const jdupes_ino_t inode, const dev_t device) { struct travdone *trav; LOUD(fprintf(stderr, "travdone_alloc(%" PRIdMAX ", %" PRIdMAX ")\n", (intmax_t)inode, (intmax_t)device);) trav = (struct travdone *)string_malloc(sizeof(struct travdone)); if (trav == NULL) { LOUD(fprintf(stderr, "travdone_alloc: malloc failed\n");) return NULL; } trav->left = NULL; trav->right = NULL; trav->inode = inode; trav->device = device; LOUD(fprintf(stderr, "travdone_alloc returned %p\n", (void *)trav);) return trav; } /* Add a single file to the file tree */ static inline file_t *grokfile(const char * const restrict name, file_t * restrict * const restrict filelistp) { file_t * restrict newfile; if (!name || !filelistp) nullptr("grokfile()"); LOUD(fprintf(stderr, "grokfile: '%s' %p\n", name, filelistp)); /* Allocate the file_t 
and the d_name entries */
  newfile = init_newfile(strlen(name) + 2, filelistp);

  strcpy(newfile->d_name, name);

  /* Single-file [l]stat() and exclusion condition check */
  if (check_singlefile(newfile) != 0) {
    LOUD(fprintf(stderr, "grokfile: check_singlefile rejected file\n"));
    string_free(newfile->d_name);
    string_free(newfile);
    return NULL;
  }
  return newfile;
}


/* Load a directory's contents into the file tree, recursing as needed.
 * Also accepts a single file path (getdirstats() == 1), which is routed
 * through grokfile() and the add_single_file label. A binary tree of
 * (inode, device) pairs prevents scanning the same directory twice. */
static void grokdir(const char * const restrict dir,
                file_t * restrict * const restrict filelistp,
                int recurse)
{
  file_t * restrict newfile;
  struct dirent *dirinfo;
  static int grokdir_level = 0;  /* recursion depth; final progress line prints at 0 */
  static char tempname[PATHBUF_SIZE * 2];
  size_t dirlen;
  struct travdone *traverse;
  int i, single = 0;
  jdupes_ino_t inode, n_inode;
  dev_t device, n_device;
#ifdef UNICODE
  WIN32_FIND_DATA ffd;
  HANDLE hFind = INVALID_HANDLE_VALUE;
  char *p;
#else
  DIR *cd;
#endif

  if (dir == NULL || filelistp == NULL) nullptr("grokdir()");
  LOUD(fprintf(stderr, "grokdir: scanning '%s' (order %d, recurse %d)\n", dir, user_item_count, recurse));

  /* Double traversal prevention tree */
  i = getdirstats(dir, &inode, &device);
  if (i < 0) goto error_travdone;

  if (travdone_head == NULL) {
    travdone_head = travdone_alloc(inode, device);
    if (travdone_head == NULL) goto error_travdone;
  } else {
    traverse = travdone_head;
    while (1) {
      if (traverse == NULL) nullptr("grokdir() traverse");
      /* Don't re-traverse directories we've already seen */
      if (inode == traverse->inode && device == traverse->device) {
        LOUD(fprintf(stderr, "already seen item '%s', skipping\n", dir);)
        return;
      } else if (inode > traverse->inode ||
          (inode == traverse->inode && device > traverse->device)) {
        /* Traverse right */
        if (traverse->right == NULL) {
          LOUD(fprintf(stderr, "traverse item right '%s'\n", dir);)
          traverse->right = travdone_alloc(inode, device);
          if (traverse->right == NULL) goto error_travdone;
          break;
        }
        traverse = traverse->right;
        continue;
      } else {
        /* Traverse left */
        if (traverse->left == NULL) {
          LOUD(fprintf(stderr, "traverse item left '%s'\n", dir);)
          traverse->left = travdone_alloc(inode, device);
          if (traverse->left == NULL) goto error_travdone;
          break;
        }
        traverse = traverse->left;
        continue;
      }
    }
  }

  item_progress++;
  grokdir_level++;

  /* if dir is actually a file, just add it to the file tree */
  if (i == 1) {
    newfile = grokfile(dir, filelistp);
    if (newfile == NULL) {
      LOUD(fprintf(stderr, "grokfile rejected '%s'\n", dir));
      /* NOTE(review): this early return skips the grokdir_level-- at
       * skip_single, leaving the level counter incremented — confirm */
      return;
    }
    single = 1;
    goto add_single_file;
  }

#ifdef UNICODE
  /* Windows requires \* at the end of directory names */
  strncpy(tempname, dir, PATHBUF_SIZE * 2);
  dirlen = strlen(tempname) - 1;
  p = tempname + dirlen;
  if (*p == '/' || *p == '\\') *p = '\0';
  /* NOTE(review): strncat's size argument should be the REMAINING space,
   * not the total buffer size — verify against PATHBUF_SIZE usage */
  strncat(tempname, "\\*", PATHBUF_SIZE * 2);

  if (!M2W(tempname, wname)) goto error_cd;

  LOUD(fprintf(stderr, "FindFirstFile: %s\n", dir));
  hFind = FindFirstFile((LPCWSTR)wname, &ffd);
  if (hFind == INVALID_HANDLE_VALUE) {
    LOUD(fprintf(stderr, "\nfile handle bad\n"));
    goto error_cd;
  }
  LOUD(fprintf(stderr, "Loop start\n"));
  do {
    char * restrict tp = tempname;
    size_t d_name_len;

    /* Get necessary length and allocate d_name */
    /* NOTE(review): this per-iteration dirent allocation is not freed in
     * the loop; presumably reclaimed when the string_malloc pools are
     * destroyed — verify */
    dirinfo = (struct dirent *)string_malloc(sizeof(struct dirent));
    if (!W2M(ffd.cFileName, dirinfo->d_name)) continue;
#else
  cd = opendir(dir);
  if (!cd) goto error_cd;

  while ((dirinfo = readdir(cd)) != NULL) {
    char * restrict tp = tempname;
    size_t d_name_len;
#endif /* UNICODE */

    LOUD(fprintf(stderr, "grokdir: readdir: '%s'\n", dirinfo->d_name));
    /* Never descend into or add the "." and ".." entries */
    if (!strcmp(dirinfo->d_name, ".") || !strcmp(dirinfo->d_name, "..")) continue;
    if (!ISFLAG(flags, F_HIDEPROGRESS)) {
      gettimeofday(&time2, NULL);
      /* Throttle the progress line to at most one update per second */
      if (progress == 0 || time2.tv_sec > time1.tv_sec) {
        fprintf(stderr, "\rScanning: %" PRIuMAX " files, %" PRIuMAX " dirs (in %u specified)",
            progress, item_progress, user_item_count);
      }
      time1.tv_sec = time2.tv_sec;
    }

    /* Assemble the file's full path name, optimized to avoid strcat() */
    dirlen = strlen(dir);
    d_name_len = strlen(dirinfo->d_name);
    memcpy(tp, dir, dirlen+1);
    if (dirlen != 0 && tp[dirlen-1] != dir_sep) {
      tp[dirlen] = dir_sep;
      dirlen++;
    }
    if (dirlen + d_name_len + 1 >= (PATHBUF_SIZE * 2)) goto error_overflow;
    tp += dirlen;
    memcpy(tp, dirinfo->d_name, d_name_len);
    tp += d_name_len;
    *tp = '\0';
    d_name_len++;

    /* Allocate the file_t and the d_name entries */
    newfile = init_newfile(dirlen + d_name_len + 2, filelistp);

    tp = tempname;
    memcpy(newfile->d_name, tp, dirlen + d_name_len);

    /* Single-file [l]stat() and exclusion condition check */
    if (check_singlefile(newfile) != 0) {
      LOUD(fprintf(stderr, "grokdir: check_singlefile rejected file\n"));
      string_free(newfile->d_name);
      string_free(newfile);
      continue;
    }

    /* Optionally recurse directories, including symlinked ones if requested */
    if (S_ISDIR(newfile->mode)) {
      if (recurse) {
        /* --one-file-system: skip subdirectories on a different device */
        if (ISFLAG(flags, F_ONEFS)
            && (getdirstats(newfile->d_name, &n_inode, &n_device) == 0)
            && (device != n_device)) {
          LOUD(fprintf(stderr, "grokdir: directory: not recursing (--one-file-system)\n"));
          string_free(newfile->d_name);
          string_free(newfile);
          continue;
        }
#ifndef NO_SYMLINKS
        else if (ISFLAG(flags, F_FOLLOWLINKS) || !ISFLAG(newfile->flags, F_IS_SYMLINK)) {
          LOUD(fprintf(stderr, "grokdir: directory(symlink): recursing (-r/-R)\n"));
          grokdir(newfile->d_name, filelistp, recurse);
        }
#else
        else {
          LOUD(fprintf(stderr, "grokdir: directory: recursing (-r/-R)\n"));
          grokdir(newfile->d_name, filelistp, recurse);
        }
#endif
      } else {
        LOUD(fprintf(stderr, "grokdir: directory: not recursing\n"));
      }
      /* Directories themselves are never added to the file list */
      string_free(newfile->d_name);
      string_free(newfile);
      continue;
    } else {
add_single_file:
      /* Add regular files to list, including symlink targets if requested */
#ifndef NO_SYMLINKS
      if (!ISFLAG(newfile->flags, F_IS_SYMLINK) ||
          (ISFLAG(newfile->flags, F_IS_SYMLINK) && ISFLAG(flags, F_FOLLOWLINKS))) {
#else
      if (S_ISREG(newfile->mode)) {
#endif
        *filelistp = newfile;
        filecount++;
        progress++;
      } else {
        LOUD(fprintf(stderr, "grokdir: not a regular file: %s\n", newfile->d_name);)
        string_free(newfile->d_name);
        string_free(newfile);
        if (single == 1) {
          single = 0;
          goto skip_single;
        }
        continue;
      }
    }
    /* Skip directory stuff if adding only a single file */
    if (single == 1) {
      single = 0;
      goto skip_single;
    }
  }
#ifdef UNICODE
  while (FindNextFile(hFind, &ffd) != 0);
  FindClose(hFind);
#else
  closedir(cd);
#endif

skip_single:
  grokdir_level--;
  if (grokdir_level == 0 && !ISFLAG(flags, F_HIDEPROGRESS)) {
    fprintf(stderr, "\rScanning: %" PRIuMAX " files, %" PRIuMAX " items (in %u specified)",
            progress, item_progress, user_item_count);
  }
  return;

error_travdone:
  fprintf(stderr, "\ncould not stat dir ");
  fwprint(stderr, dir, 1);
  return;
error_cd:
  /* NOTE(review): reached on opendir()/FindFirstFile() failure, but the
   * message says "chdir" — confirm intended wording */
  fprintf(stderr, "\ncould not chdir to ");
  fwprint(stderr, dir, 1);
  return;
error_overflow:
  fprintf(stderr, "\nerror: a path buffer overflowed\n");
  exit(EXIT_FAILURE);
}


/* Use Jody Bruchon's hash function on part or all of a file.
 * max_read == 0 hashes the whole file; max_read == PARTIAL_HASH_SIZE
 * yields the cheap "partial" hash used for early exclusion. Returns a
 * pointer to a static hash value, or NULL on any error. */
static hash_t *get_filehash(const file_t * const restrict checkfile, const size_t max_read)
{
  off_t fsize;
  /* This is an array because we return a pointer to it */
  static hash_t hash[1];
  static hash_t chunk[(CHUNK_SIZE / sizeof(hash_t))];
  FILE *file;
  int check = 0;

  if (checkfile == NULL || checkfile->d_name == NULL) nullptr("get_filehash()");
  LOUD(fprintf(stderr, "get_filehash('%s', %" PRIdMAX ")\n", checkfile->d_name, (intmax_t)max_read);)

  /* Get the file size. If we can't read it, bail out early */
  if (checkfile->size == -1) {
    LOUD(fprintf(stderr, "get_filehash: not hashing because stat() info is bad\n"));
    return NULL;
  }
  fsize = checkfile->size;

  /* Do not read more than the requested number of bytes */
  if (max_read > 0 && fsize > (off_t)max_read)
    fsize = (off_t)max_read;

  /* Initialize the hash and file read parameters (with filehash_partial skipped)
   *
   * If we already hashed the first chunk of this file, we don't want to
   * wastefully read and hash it again, so skip the first chunk and use
   * the computed hash for that chunk as our starting point.
   *
   * WARNING: We assume max_read is NEVER less than CHUNK_SIZE here!
*/ *hash = 0; if (ISFLAG(checkfile->flags, F_HASH_PARTIAL)) { *hash = checkfile->filehash_partial; /* Don't bother going further if max_read is already fulfilled */ if (max_read != 0 && max_read <= PARTIAL_HASH_SIZE) { LOUD(fprintf(stderr, "Partial hash size (%d) >= max_read (%" PRIuMAX "), not hashing anymore\n", PARTIAL_HASH_SIZE, (uintmax_t)max_read);) return hash; } } errno = 0; #ifdef UNICODE if (!M2W(checkfile->d_name, wstr)) file = NULL; else file = _wfopen(wstr, FILE_MODE_RO); #else file = fopen(checkfile->d_name, FILE_MODE_RO); #endif if (file == NULL) { fprintf(stderr, "\n%s error opening file ", strerror(errno)); fwprint(stderr, checkfile->d_name, 1); return NULL; } /* Actually seek past the first chunk if applicable * This is part of the filehash_partial skip optimization */ if (ISFLAG(checkfile->flags, F_HASH_PARTIAL)) { if (fseeko(file, PARTIAL_HASH_SIZE, SEEK_SET) == -1) { fclose(file); fprintf(stderr, "\nerror seeking in file "); fwprint(stderr, checkfile->d_name, 1); return NULL; } fsize -= PARTIAL_HASH_SIZE; } /* Read the file in CHUNK_SIZE chunks until we've read it all. */ while (fsize > 0) { size_t bytes_to_read; if (interrupt) return 0; bytes_to_read = (fsize >= (off_t)auto_chunk_size) ? 
auto_chunk_size : (size_t)fsize; if (fread((void *)chunk, bytes_to_read, 1, file) != 1) { fprintf(stderr, "\nerror reading from file "); fwprint(stderr, checkfile->d_name, 1); fclose(file); return NULL; } *hash = jody_block_hash(chunk, *hash, bytes_to_read); if ((off_t)bytes_to_read > fsize) break; else fsize -= (off_t)bytes_to_read; if (!ISFLAG(flags, F_HIDEPROGRESS)) { check++; if (check > CHECK_MINIMUM) { update_progress("hashing", (int)(((checkfile->size - fsize) * 100) / checkfile->size)); check = 0; } } } fclose(file); LOUD(fprintf(stderr, "get_filehash: returning hash: 0x%016jx\n", (uintmax_t)*hash)); return hash; } static inline void registerfile(filetree_t * restrict * const restrict nodeptr, const enum tree_direction d, file_t * const restrict file) { filetree_t * restrict branch; if (nodeptr == NULL || file == NULL || (d != NONE && *nodeptr == NULL)) nullptr("registerfile()"); LOUD(fprintf(stderr, "registerfile(direction %d)\n", d)); /* Allocate and initialize a new node for the file */ branch = (filetree_t *)string_malloc(sizeof(filetree_t)); if (branch == NULL) oom("registerfile() branch"); branch->file = file; branch->left = NULL; branch->right = NULL; #ifdef USE_TREE_REBALANCE branch->left_weight = 0; branch->right_weight = 0; /* Attach the new node to the requested branch and the parent */ switch (d) { case LEFT: branch->parent = *nodeptr; (*nodeptr)->left = branch; (*nodeptr)->left_weight++; break; case RIGHT: branch->parent = *nodeptr; (*nodeptr)->right = branch; (*nodeptr)->right_weight++; break; case NONE: /* For the root of the tree only */ branch->parent = NULL; *nodeptr = branch; break; default: /* This should never ever happen */ fprintf(stderr, "\ninternal error: invalid direction for registerfile(), report this\n"); string_malloc_destroy(); exit(EXIT_FAILURE); break; } /* Propagate weights up the tree */ while (branch->parent != NULL) { filetree_t * restrict up; up = branch->parent; if (up->left == branch) up->left_weight++; else if 
(up->right == branch) up->right_weight++;
    else {
      fprintf(stderr, "\nInternal error: file tree linkage is broken\n");
      exit(EXIT_FAILURE);
    }
    branch = up;
  }
#else /* USE_TREE_REBALANCE */

  /* Attach the new node to the requested branch */
  switch (d) {
    case LEFT:
      (*nodeptr)->left = branch;
      break;
    case RIGHT:
      (*nodeptr)->right = branch;
      break;
    case NONE:
      /* For the root of the tree only */
      *nodeptr = branch;
      break;
    default:
      /* This should never ever happen */
      fprintf(stderr, "\ninternal error: invalid direction for registerfile(), report this\n");
      string_malloc_destroy();
      exit(EXIT_FAILURE);
      break;
  }
#endif /* USE_TREE_REBALANCE */

  return;
}


/* Experimental tree rebalance code. This slows things down in testing
 * but may be more useful in the future. Pass -DUSE_TREE_REBALANCE
 * to try it. */
#ifdef USE_TREE_REBALANCE

/* How much difference to ignore when considering a rebalance */
#ifndef BALANCE_THRESHOLD
#define BALANCE_THRESHOLD 4
#endif

/* Rebalance the file tree to reduce search depth.
 * Recursively rebalances heavy children first, then performs a single
 * left or right rotation at this node if the left/right weight skew
 * exceeds BALANCE_THRESHOLD (and, with CONSIDER_IMBALANCE, only if the
 * rotation would not make the skew worse). */
static inline void rebalance_tree(filetree_t * const tree)
{
  filetree_t * restrict promote;
  filetree_t * restrict demote;
  int difference, direction;
#ifdef CONSIDER_IMBALANCE
  int l, r, imbalance;
#endif

  if (!tree) return;

  /* Rebalance all children first */
  if (tree->left_weight > BALANCE_THRESHOLD) rebalance_tree(tree->left);
  if (tree->right_weight > BALANCE_THRESHOLD) rebalance_tree(tree->right);

  /* If weights are within a certain threshold, do nothing */
  direction = tree->right_weight - tree->left_weight;
  difference = direction;
  if (difference < 0) difference = -difference;
  if (difference <= BALANCE_THRESHOLD) return;

  /* Determine if a tree rotation will help, and do it if so */
  if (direction > 0) {
#ifdef CONSIDER_IMBALANCE
    l = tree->right->left_weight + tree->right_weight;
    r = tree->right->right_weight;
    imbalance = l - r;
    if (imbalance < 0) imbalance = -imbalance;
    /* Don't rotate if imbalance will increase */
    if (imbalance >= difference) return;
#endif /* CONSIDER_IMBALANCE */

    /* Rotate the right node up one level */
    promote = tree->right;
    demote = tree;
    /* Attach new parent's left tree to old parent */
    demote->right = promote->left;
    demote->right_weight = promote->left_weight;
    /* Attach old parent to new parent */
    promote->left = demote;
    promote->left_weight = demote->left_weight + demote->right_weight + 1;
    /* Reconnect parent linkages */
    promote->parent = demote->parent;
    if (demote->right) demote->right->parent = demote;
    demote->parent = promote;
    if (promote->parent == NULL) checktree = promote;
    else if (promote->parent->left == demote) promote->parent->left = promote;
    else promote->parent->right = promote;
    return;
  } else if (direction < 0) {
#ifdef CONSIDER_IMBALANCE
    r = tree->left->right_weight + tree->left_weight;
    l = tree->left->left_weight;
    imbalance = r - l;
    if (imbalance < 0) imbalance = -imbalance;
    /* Don't rotate if imbalance will increase */
    if (imbalance >= difference) return;
#endif /* CONSIDER_IMBALANCE */

    /* Rotate the left node up one level */
    promote = tree->left;
    demote = tree;
    /* Attach new parent's right tree to old parent */
    demote->left = promote->right;
    demote->left_weight = promote->right_weight;
    /* Attach old parent to new parent */
    promote->right = demote;
    promote->right_weight = demote->right_weight + demote->left_weight + 1;
    /* Reconnect parent linkages */
    promote->parent = demote->parent;
    if (demote->left) demote->left->parent = demote;
    demote->parent = promote;
    if (promote->parent == NULL) checktree = promote;
    else if (promote->parent->left == demote) promote->parent->left = promote;
    else promote->parent->right = promote;
    return;
  }

  /* Fall through */
  return;
}

#endif /* USE_TREE_REBALANCE */


#ifdef TREE_DEPTH_STATS
#define TREE_DEPTH_UPDATE_MAX() { if (max_depth < tree_depth) max_depth = tree_depth; tree_depth = 0; }
#else
#define TREE_DEPTH_UPDATE_MAX()
#endif


/* Check two files for a match */
static file_t **checkmatch(filetree_t * restrict tree, file_t * const restrict file)
{
  int cmpresult = 0;
  const
hash_t * restrict filehash;

  if (tree == NULL || file == NULL || tree->file == NULL || tree->file->d_name == NULL || file->d_name == NULL) nullptr("checkmatch()");
  LOUD(fprintf(stderr, "checkmatch ('%s', '%s')\n", tree->file->d_name, file->d_name));

  /* If device and inode fields are equal one of the files is a
   * hard link to the other or the files have been listed twice
   * unintentionally. We don't want to flag these files as
   * duplicates unless the user specifies otherwise. */

  /* Count the total number of comparisons requested */
  DBG(comparisons++;)

  /* If considering hard linked files as duplicates, they are
   * automatically duplicates without being read further since
   * they point to the exact same inode. If we aren't considering
   * hard links as duplicates, we just return NULL. */

  cmpresult = check_conditions(tree->file, file);
  switch (cmpresult) {
    case 2: return &tree->file;  /* linked files + -H switch */
    case -2: return NULL;  /* linked files, no -H switch */
    default: break;
  }

  /* If preliminary matching succeeded, move to full file checks */
  if (cmpresult == 0) {
    LOUD(fprintf(stderr, "checkmatch: starting file data comparisons\n"));
    /* Attempt to exclude files quickly with partial file hashing */
    /* Both files get a cached partial hash (F_HASH_PARTIAL) on demand */
    if (!ISFLAG(tree->file->flags, F_HASH_PARTIAL)) {
      filehash = get_filehash(tree->file, PARTIAL_HASH_SIZE);
      if (filehash == NULL) return NULL;
      tree->file->filehash_partial = *filehash;
      SETFLAG(tree->file->flags, F_HASH_PARTIAL);
    }
    if (!ISFLAG(file->flags, F_HASH_PARTIAL)) {
      filehash = get_filehash(file, PARTIAL_HASH_SIZE);
      if (filehash == NULL) return NULL;
      file->filehash_partial = *filehash;
      SETFLAG(file->flags, F_HASH_PARTIAL);
    }

    cmpresult = HASH_COMPARE(file->filehash_partial, tree->file->filehash_partial);
    LOUD(if (!cmpresult) fprintf(stderr, "checkmatch: partial hashes match\n"));
    LOUD(if (cmpresult) fprintf(stderr, "checkmatch: partial hashes do not match\n"));
    DBG(partial_hash++;)

    if (file->size <= PARTIAL_HASH_SIZE) {
      LOUD(fprintf(stderr, "checkmatch: small file: copying partial hash to full hash\n"));
      /* filehash_partial = filehash if file is small enough */
      if (!ISFLAG(file->flags, F_HASH_FULL)) {
        file->filehash = file->filehash_partial;
        SETFLAG(file->flags, F_HASH_FULL);
        DBG(small_file++;)
      }
      if (!ISFLAG(tree->file->flags, F_HASH_FULL)) {
        tree->file->filehash = tree->file->filehash_partial;
        SETFLAG(tree->file->flags, F_HASH_FULL);
        DBG(small_file++;)
      }
    } else if (cmpresult == 0) {
      /* If partial match was correct, perform a full file hash match */
      if (!ISFLAG(tree->file->flags, F_HASH_FULL)) {
        filehash = get_filehash(tree->file, 0);
        if (filehash == NULL) return NULL;
        tree->file->filehash = *filehash;
        SETFLAG(tree->file->flags, F_HASH_FULL);
      }
      if (!ISFLAG(file->flags, F_HASH_FULL)) {
        filehash = get_filehash(file, 0);
        if (filehash == NULL) return NULL;
        file->filehash = *filehash;
        SETFLAG(file->flags, F_HASH_FULL);
      }

      /* Full file hash comparison */
      cmpresult = HASH_COMPARE(file->filehash, tree->file->filehash);
      LOUD(if (!cmpresult) fprintf(stderr, "checkmatch: full hashes match\n"));
      LOUD(if (cmpresult) fprintf(stderr, "checkmatch: full hashes do not match\n"));
      DBG(full_hash++);
    } else {
      DBG(partial_elim++);
    }
  }

  /* cmpresult now orders the file against this tree node: descend or
   * register on the appropriate side, or report a hash-level match */
  if (cmpresult < 0) {
    if (tree->left != NULL) {
      LOUD(fprintf(stderr, "checkmatch: recursing tree: left\n"));
      DBG(left_branch++; tree_depth++;)
      return checkmatch(tree->left, file);
    } else {
      LOUD(fprintf(stderr, "checkmatch: registering file: left\n"));
      registerfile(&tree, LEFT, file);
      TREE_DEPTH_UPDATE_MAX();
      return NULL;
    }
  } else if (cmpresult > 0) {
    if (tree->right != NULL) {
      LOUD(fprintf(stderr, "checkmatch: recursing tree: right\n"));
      DBG(right_branch++; tree_depth++;)
      return checkmatch(tree->right, file);
    } else {
      LOUD(fprintf(stderr, "checkmatch: registering file: right\n"));
      registerfile(&tree, RIGHT, file);
      TREE_DEPTH_UPDATE_MAX();
      return NULL;
    }
  } else {
    /* All compares matched */
    DBG(partial_to_full++;)
    TREE_DEPTH_UPDATE_MAX();
    LOUD(fprintf(stderr, "checkmatch: files appear to match based on hashes\n"));
    return &tree->file;
  }
  /* Fall through - should never be reached */
  return NULL;
}


/* Do a byte-by-byte comparison in case two different files produce the
   same signature. Unlikely, but better safe than sorry. */
static inline int confirmmatch(FILE * const restrict file1, FILE * const restrict file2, off_t size)
{
  static char c1[CHUNK_SIZE], c2[CHUNK_SIZE];
  size_t r1, r2;
  off_t bytes = 0;
  int check = 0;

  if (file1 == NULL || file2 == NULL) nullptr("confirmmatch()");
  LOUD(fprintf(stderr, "confirmmatch running\n"));

  /* Rewind both streams; they may have been read during hashing */
  fseek(file1, 0, SEEK_SET);
  fseek(file2, 0, SEEK_SET);

  do {
    if (interrupt) return 0;
    r1 = fread(c1, sizeof(char), auto_chunk_size, file1);
    r2 = fread(c2, sizeof(char), auto_chunk_size, file2);

    if (r1 != r2) return 0; /* file lengths are different */
    if (memcmp (c1, c2, r1)) return 0; /* file contents are different */

    if (!ISFLAG(flags, F_HIDEPROGRESS)) {
      check++;
      bytes += (off_t)r1;
      if (check > CHECK_MINIMUM) {
        update_progress("confirm", (int)((bytes * 100) / size));
        check = 0;
      }
    }
  } while (r2);

  return 1;
}


/* Count the following statistics:
   - Maximum number of files in a duplicate set (length of longest dupe chain)
   - Number of non-zero-length files that have duplicates (if n_files != NULL)
   - Total number of duplicate file sets (groups) */
extern unsigned int get_max_dupes(const file_t *files, unsigned int * const restrict max,
                unsigned int * const restrict n_files)
{
  unsigned int groups = 0;

  if (files == NULL || max == NULL) nullptr("get_max_dupes()");
  LOUD(fprintf(stderr, "get_max_dupes(%p, %p, %p)\n", (const void *)files, (void *)max, (void *)n_files));

  *max = 0;
  if (n_files) *n_files = 0;

  while (files) {
    unsigned int n_dupes;
    /* F_HAS_DUPES is only set on the first file in each dupe chain */
    if (ISFLAG(files->flags, F_HAS_DUPES)) {
      groups++;
      if (n_files && files->size) (*n_files)++;
      n_dupes = 1;
      for (file_t *curdupe = files->duplicates; curdupe; curdupe = curdupe->duplicates) n_dupes++;
      if (n_dupes > *max) *max = n_dupes;
    }
    files = files->next;
  }
  return groups;
}


static int sort_pairs_by_param_order(file_t *f1, file_t
*f2) { if (!ISFLAG(flags, F_USEPARAMORDER)) return 0; if (f1 == NULL || f2 == NULL) nullptr("sort_pairs_by_param_order()"); if (f1->user_order < f2->user_order) return -sort_direction; if (f1->user_order > f2->user_order) return sort_direction; return 0; } static int sort_pairs_by_mtime(file_t *f1, file_t *f2) { if (f1 == NULL || f2 == NULL) nullptr("sort_pairs_by_mtime()"); int po = sort_pairs_by_param_order(f1, f2); if (po != 0) return po; if (f1->mtime < f2->mtime) return -sort_direction; else if (f1->mtime > f2->mtime) return sort_direction; return 0; } static int sort_pairs_by_filename(file_t *f1, file_t *f2) { if (f1 == NULL || f2 == NULL) nullptr("sort_pairs_by_filename()"); int po = sort_pairs_by_param_order(f1, f2); if (po != 0) return po; return numeric_sort(f1->d_name, f2->d_name, sort_direction); } static void registerpair(file_t **matchlist, file_t *newmatch, int (*comparef)(file_t *f1, file_t *f2)) { file_t *traverse; file_t *back; /* NULL pointer sanity checks */ if (matchlist == NULL || newmatch == NULL || comparef == NULL) nullptr("registerpair()"); LOUD(fprintf(stderr, "registerpair: '%s', '%s'\n", (*matchlist)->d_name, newmatch->d_name);) SETFLAG((*matchlist)->flags, F_HAS_DUPES); back = NULL; traverse = *matchlist; /* FIXME: This needs to be changed! As it currently stands, the compare * function only runs on a pair as it is registered and future pairs can * mess up the sort order. A separate sorting function should happen before * the dupe chain is acted upon rather than while pairs are registered. 
*/ while (traverse) { if (comparef(newmatch, traverse) <= 0) { newmatch->duplicates = traverse; if (!back) { *matchlist = newmatch; /* update pointer to head of list */ SETFLAG(newmatch->flags, F_HAS_DUPES); CLEARFLAG(traverse->flags, F_HAS_DUPES); /* flag is only for first file in dupe chain */ } else back->duplicates = newmatch; break; } else { if (traverse->duplicates == 0) { traverse->duplicates = newmatch; if (!back) SETFLAG(traverse->flags, F_HAS_DUPES); break; } } back = traverse; traverse = traverse->duplicates; } return; } static inline void help_text(void) { printf("Usage: jdupes [options] DIRECTORY...\n\n"); #ifdef LOUD printf(" -@ --loud \toutput annoying low-level debug info while running\n"); #endif printf(" -1 --one-file-system \tdo not match files on different filesystems/devices\n"); printf(" -A --nohidden \texclude hidden files from consideration\n"); #ifdef ENABLE_BTRFS printf(" -B --dedupe \tSend matches to btrfs for block-level deduplication\n"); #endif printf(" -d --delete \tprompt user for files to preserve and delete all\n"); printf(" \tothers; important: under particular circumstances,\n"); printf(" \tdata may be lost when using this option together\n"); printf(" \twith -s or --symlinks, or when specifying a\n"); printf(" \tparticular directory more than once; refer to the\n"); printf(" \tdocumentation for additional information\n"); #ifdef DEBUG printf(" -D --debug \toutput debug statistics after completion\n"); #endif printf(" -f --omitfirst \tomit the first file in each set of matches\n"); printf(" -h --help \tdisplay this help message\n"); #ifndef NO_HARDLINKS printf(" -H --hardlinks \ttreat any linked files as duplicate files. 
Normally\n"); printf(" \tlinked files are treated as non-duplicates for safety\n"); #endif printf(" -i --reverse \treverse (invert) the match sort order\n"); printf(" -I --isolate \tfiles in the same specified directory won't match\n"); #ifndef NO_SYMLINKS printf(" -l --linksoft \tmake relative symlinks for duplicates w/o prompting\n"); #endif #ifndef NO_HARDLINKS printf(" -L --linkhard \thard link all duplicate files without prompting\n"); #ifdef ON_WINDOWS printf(" \tWindows allows a maximum of 1023 hard links per file\n"); #endif /* ON_WINDOWS */ #endif /* NO_HARDLINKS */ printf(" -m --summarize \tsummarize dupe information\n"); //printf(" -n --noempty \texclude zero-length files from consideration\n"); printf(" -N --noprompt \ttogether with --delete, preserve the first file in\n"); printf(" \teach set of duplicates and delete the rest without\n"); printf(" \tprompting the user\n"); printf(" -o --order=BY \tselect sort order for output, linking and deleting; by\n"); printf(" -O --paramorder \tParameter order is more important than selected -O sort\n"); printf(" \tmtime (BY=time) or filename (BY=name, the default)\n"); #ifndef NO_PERMS printf(" -p --permissions \tdon't consider files with different owner/group or\n"); printf(" \tpermission bits as duplicates\n"); #endif printf(" -Q --quick \tskip byte-for-byte confirmation for quick matching\n"); printf(" \tWARNING: -Q can result in data loss! Be very careful!\n"); printf(" -r --recurse \tfor every directory, process its subdirectories too\n"); printf(" -R --recurse: \tfor each directory given after this option follow\n"); printf(" \tsubdirectories encountered within (note the ':' at\n"); printf(" \tthe end of the option, manpage for more details)\n"); #ifndef NO_SYMLINKS printf(" -s --symlinks \tfollow symlinks\n"); #endif printf(" -S --size \tshow size of duplicate files\n"); printf(" -q --quiet \thide progress indicator\n"); /* This is undocumented in the quick help because it is a dangerous option. 
If you * really want it, uncomment it here, and may your data rest in peace. */ /* printf(" -Q --quick \tskip byte-by-byte duplicate verification. WARNING:\n"); printf(" \tthis may delete non-duplicates! Read the manual first!\n"); */ printf(" -v --version \tdisplay jdupes version and license information\n"); printf(" -x --xsize=SIZE \texclude files of size < SIZE bytes from consideration\n"); printf(" --xsize=+SIZE \t'+' specified before SIZE, exclude size > SIZE\n"); printf(" -X --exclude=spec:info\texclude files based on specified criteria\n"); printf(" \tspecs: dir size+-=\n"); printf(" \tExclusions are cumulative: -X dir:abc -X dir:efg\n"); printf(" -z --zeromatch \tconsider zero-length files to be duplicates\n"); printf(" -Z --softabort \tIf the user aborts (i.e. CTRL-C) act on matches so far\n"); printf("\nFor sizes, K/M/G/T/P/E[B|iB] suffixes can be used (case-insensitive)\n"); #ifdef OMIT_GETOPT_LONG printf("Note: Long options are not supported in this build.\n\n"); #endif } #ifdef UNICODE int wmain(int argc, wchar_t **wargv) #else int main(int argc, char **argv) #endif { static struct proc_cacheinfo pci; static file_t *files = NULL; static file_t *curfile; static char **oldargv; static char *xs; static int firstrecurse; static int opt; static int pm = 1; static ordertype_t ordertype = ORDER_NAME; #ifndef OMIT_GETOPT_LONG static const struct option long_options[] = { { "loud", 0, 0, '@' }, { "one-file-system", 0, 0, '1' }, { "nohidden", 0, 0, 'A' }, { "dedupe", 0, 0, 'B' }, { "delete", 0, 0, 'd' }, { "debug", 0, 0, 'D' }, { "omitfirst", 0, 0, 'f' }, { "help", 0, 0, 'h' }, { "hardlinks", 0, 0, 'H' }, { "reverse", 0, 0, 'i' }, { "isolate", 0, 0, 'I' }, { "linksoft", 0, 0, 'l' }, { "linkhard", 0, 0, 'L' }, { "summarize", 0, 0, 'm'}, { "summary", 0, 0, 'm' }, { "noempty", 0, 0, 'n' }, { "noprompt", 0, 0, 'N' }, { "order", 1, 0, 'o' }, { "paramorder", 0, 0, 'O' }, { "permissions", 0, 0, 'p' }, { "quiet", 0, 0, 'q' }, { "quick", 0, 0, 'Q' }, { "recurse", 0, 0, 
'r' }, { "recursive", 0, 0, 'r' }, { "recurse:", 0, 0, 'R' }, { "recursive:", 0, 0, 'R' }, { "symlinks", 0, 0, 's' }, { "size", 0, 0, 'S' }, { "version", 0, 0, 'v' }, { "xsize", 1, 0, 'x' }, { "exclude", 1, 0, 'X' }, { "zeromatch", 0, 0, 'z' }, { "softabort", 0, 0, 'Z' }, { NULL, 0, 0, 0 } }; #define GETOPT getopt_long #else #define GETOPT getopt #endif /* Windows buffers our stderr output; don't let it do that */ #ifdef ON_WINDOWS if (setvbuf(stderr, NULL, _IONBF, 0) != 0) fprintf(stderr, "warning: setvbuf() failed\n"); #endif #ifdef UNICODE /* Create a UTF-8 **argv from the wide version */ static char **argv; argv = (char **)string_malloc(sizeof(char *) * argc); if (!argv) oom("main() unicode argv"); widearg_to_argv(argc, wargv, argv); /* fix up __argv so getopt etc. don't crash */ __argv = argv; /* Only use UTF-16 for terminal output, else use UTF-8 */ if (!_isatty(_fileno(stdout))) out_mode = _O_BINARY; else out_mode = _O_U16TEXT; if (!_isatty(_fileno(stderr))) err_mode = _O_BINARY; else err_mode = _O_U16TEXT; #endif /* UNICODE */ /* Auto-tune chunk size to be half of L1 data cache if possible */ get_proc_cacheinfo(&pci); if (pci.l1 != 0) auto_chunk_size = (pci.l1 / 2); else if (pci.l1d != 0) auto_chunk_size = (pci.l1d / 2); /* Must be at least 4096 (4 KiB) and cannot exceed CHUNK_SIZE */ if (auto_chunk_size < 4096 || auto_chunk_size > CHUNK_SIZE) auto_chunk_size = CHUNK_SIZE; /* Force to a multiple of 4096 if it isn't already */ if ((auto_chunk_size & 0x00000fffUL) != 0) auto_chunk_size = (auto_chunk_size + 0x00000fffUL) & 0x000ff000; program_name = argv[0]; oldargv = cloneargs(argc, argv); while ((opt = GETOPT(argc, argv, "@1ABdDfhHiIlLmnNOpqQrRsSvzZo:x:X:" #ifndef OMIT_GETOPT_LONG , long_options, NULL #endif )) != EOF) { switch (opt) { case '1': SETFLAG(flags, F_ONEFS); break; case 'A': SETFLAG(flags, F_EXCLUDEHIDDEN); break; case 'd': SETFLAG(flags, F_DELETEFILES); break; case 'D': #ifdef DEBUG SETFLAG(flags, F_DEBUG); #endif break; case 'f': SETFLAG(flags, 
F_OMITFIRST); break; case 'h': help_text(); string_malloc_destroy(); exit(EXIT_FAILURE); #ifndef NO_HARDLINKS case 'H': SETFLAG(flags, F_CONSIDERHARDLINKS); break; case 'L': SETFLAG(flags, F_HARDLINKFILES); break; #endif case 'i': SETFLAG(flags, F_REVERSESORT); break; case 'I': SETFLAG(flags, F_ISOLATE); break; case 'm': SETFLAG(flags, F_SUMMARIZEMATCHES); break; case 'n': //fprintf(stderr, "note: -n/--noempty is the default behavior now and is deprecated.\n"); break; case 'N': SETFLAG(flags, F_NOPROMPT); break; case 'O': SETFLAG(flags, F_USEPARAMORDER); break; case 'p': SETFLAG(flags, F_PERMISSIONS); break; case 'q': SETFLAG(flags, F_HIDEPROGRESS); break; case 'Q': SETFLAG(flags, F_QUICKCOMPARE); break; case 'r': SETFLAG(flags, F_RECURSE); break; case 'R': SETFLAG(flags, F_RECURSEAFTER); break; #ifndef NO_SYMLINKS case 'l': SETFLAG(flags, F_MAKESYMLINKS); break; case 's': SETFLAG(flags, F_FOLLOWLINKS); break; #endif case 'S': SETFLAG(flags, F_SHOWSIZE); break; case 'z': SETFLAG(flags, F_INCLUDEEMPTY); break; case 'Z': SETFLAG(flags, F_SOFTABORT); break; case 'x': fprintf(stderr, "-x/--xsize is deprecated; use -X size[+-=]:size[suffix] instead\n"); xs = string_malloc(8 + strlen(optarg)); if (xs == NULL) oom("xsize temp string"); strcat(xs, "size"); if (*optarg == '+') { strcat(xs, "+:"); optarg++; } else { strcat(xs, "-=:"); } strcat(xs, optarg); add_exclude(xs); string_free(xs); break; case 'X': add_exclude(optarg); break; case '@': #ifdef LOUD_DEBUG SETFLAG(flags, F_DEBUG | F_LOUD | F_HIDEPROGRESS); #endif break; case 'v': printf("jdupes %s (%s) ", VER, VERDATE); /* Indicate bitness information */ if (sizeof(uintptr_t) == 8) { if (sizeof(long) == 4) printf("64-bit i32\n"); else if (sizeof(long) == 8) printf("64-bit\n"); } else if (sizeof(uintptr_t) == 4) { if (sizeof(long) == 4) printf("32-bit\n"); else if (sizeof(long) == 8) printf("32-bit i64\n"); } else printf("%u-bit i%u\n", (unsigned int)(sizeof(uintptr_t) * 8), (unsigned int)(sizeof(long) * 8)); 
printf("Compile-time extensions:"); if (*extensions != NULL) { int c = 0; while (extensions[c] != NULL) { printf(" %s", extensions[c]); c++; } } else printf(" none"); printf("\nCopyright (C) 2015-2017 by Jody Bruchon\n"); printf("\nPermission is hereby granted, free of charge, to any person\n"); printf("obtaining a copy of this software and associated documentation files\n"); printf("(the \"Software\"), to deal in the Software without restriction,\n"); printf("including without limitation the rights to use, copy, modify, merge,\n"); printf("publish, distribute, sublicense, and/or sell copies of the Software,\n"); printf("and to permit persons to whom the Software is furnished to do so,\n"); printf("subject to the following conditions:\n\n"); printf("The above copyright notice and this permission notice shall be\n"); printf("included in all copies or substantial portions of the Software.\n\n"); printf("THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n"); printf("OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n"); printf("MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\n"); printf("IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\n"); printf("CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\n"); printf("TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n"); printf("SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"); exit(EXIT_SUCCESS); case 'o': if (!strncasecmp("name", optarg, 5)) { ordertype = ORDER_NAME; } else if (!strncasecmp("time", optarg, 5)) { ordertype = ORDER_TIME; } else { fprintf(stderr, "invalid value for --order: '%s'\n", optarg); exit(EXIT_FAILURE); } break; case 'B': #ifdef ENABLE_BTRFS SETFLAG(flags, F_DEDUPEFILES); /* btrfs will do the byte-for-byte check itself */ SETFLAG(flags, F_QUICKCOMPARE); /* It is completely useless to dedupe zero-length extents */ CLEARFLAG(flags, F_INCLUDEEMPTY); #else fprintf(stderr, "This program 
was built without btrfs support\n"); exit(EXIT_FAILURE); #endif break; default: if (opt != '?') fprintf(stderr, "Sorry, using '-%c' is not supported in this build.\n", opt); fprintf(stderr, "Try `jdupes --help' for more information.\n"); string_malloc_destroy(); exit(EXIT_FAILURE); } } if (optind >= argc) { fprintf(stderr, "no directories specified (use -h option for help)\n"); string_malloc_destroy(); exit(EXIT_FAILURE); } if (ISFLAG(flags, F_RECURSE) && ISFLAG(flags, F_RECURSEAFTER)) { fprintf(stderr, "options --recurse and --recurse: are not compatible\n"); string_malloc_destroy(); exit(EXIT_FAILURE); } if (ISFLAG(flags, F_SUMMARIZEMATCHES) && ISFLAG(flags, F_DELETEFILES)) { fprintf(stderr, "options --summarize and --delete are not compatible\n"); string_malloc_destroy(); exit(EXIT_FAILURE); } #ifdef ENABLE_BTRFS if (ISFLAG(flags, F_CONSIDERHARDLINKS) && ISFLAG(flags, F_DEDUPEFILES)) fprintf(stderr, "warning: option --dedupe overrides the behavior of --hardlinks\n"); #endif /* If pm == 0, call printmatches() */ pm = !!ISFLAG(flags, F_SUMMARIZEMATCHES) + !!ISFLAG(flags, F_DELETEFILES) + !!ISFLAG(flags, F_HARDLINKFILES) + !!ISFLAG(flags, F_MAKESYMLINKS) + !!ISFLAG(flags, F_DEDUPEFILES); if (pm > 1) { fprintf(stderr, "Only one of --summarize, --delete, --linkhard, --linksoft, or --dedupe\nmay be used\n"); string_malloc_destroy(); exit(EXIT_FAILURE); } if (pm == 0) SETFLAG(flags, F_PRINTMATCHES); if (ISFLAG(flags, F_RECURSEAFTER)) { firstrecurse = nonoptafter("--recurse:", argc, oldargv, argv); if (firstrecurse == argc) firstrecurse = nonoptafter("-R", argc, oldargv, argv); if (firstrecurse == argc) { fprintf(stderr, "-R option must be isolated from other options\n"); string_malloc_destroy(); exit(EXIT_FAILURE); } /* F_RECURSE is not set for directories before --recurse: */ for (int x = optind; x < firstrecurse; x++) { slash_convert(argv[x]); grokdir(argv[x], &files, 0); user_item_count++; } /* Set F_RECURSE for directories after --recurse: */ SETFLAG(flags, 
F_RECURSE); for (int x = firstrecurse; x < argc; x++) { slash_convert(argv[x]); grokdir(argv[x], &files, 1); user_item_count++; } } else { for (int x = optind; x < argc; x++) { slash_convert(argv[x]); grokdir(argv[x], &files, ISFLAG(flags, F_RECURSE)); user_item_count++; } } if (ISFLAG(flags, F_REVERSESORT)) sort_direction = -1; if (!ISFLAG(flags, F_HIDEPROGRESS)) fprintf(stderr, "\n"); if (!files) { fwprint(stderr, "No duplicates found.", 1); exit(EXIT_SUCCESS); } curfile = files; progress = 0; /* Catch CTRL-C */ signal(SIGINT, sighandler); while (curfile) { static file_t **match = NULL; static FILE *file1; static FILE *file2; #ifdef USE_TREE_REBALANCE static unsigned int depth_threshold = INITIAL_DEPTH_THRESHOLD; #endif if (interrupt) { fprintf(stderr, "\nStopping file scan due to user abort\n"); if (!ISFLAG(flags, F_SOFTABORT)) exit(EXIT_FAILURE); interrupt = 0; /* reset interrupt for re-use */ goto skip_file_scan; } LOUD(fprintf(stderr, "\nMAIN: current file: %s\n", curfile->d_name)); if (!checktree) registerfile(&checktree, NONE, curfile); else match = checkmatch(checktree, curfile); #ifdef USE_TREE_REBALANCE /* Rebalance the match tree after a certain number of files processed */ if (max_depth > depth_threshold) { rebalance_tree(checktree); max_depth = 0; if (depth_threshold < 512) depth_threshold <<= 1; else depth_threshold += 64; } #endif /* USE_TREE_REBALANCE */ /* Byte-for-byte check that a matched pair are actually matched */ if (match != NULL) { /* Quick comparison mode will never run confirmmatch() * Also skip match confirmation for hard-linked files * (This set of comparisons is ugly, but quite efficient) */ if (ISFLAG(flags, F_QUICKCOMPARE) || (ISFLAG(flags, F_CONSIDERHARDLINKS) && (curfile->inode == (*match)->inode) && (curfile->device == (*match)->device)) ) { LOUD(fprintf(stderr, "MAIN: notice: quick compare match (-Q)\n")); registerpair(match, curfile, (ordertype == ORDER_TIME) ? 
sort_pairs_by_mtime : sort_pairs_by_filename); dupecount++; goto skip_full_check; } #ifdef UNICODE if (!M2W(curfile->d_name, wstr)) file1 = NULL; else file1 = _wfopen(wstr, FILE_MODE_RO); #else file1 = fopen(curfile->d_name, FILE_MODE_RO); #endif if (!file1) { curfile = curfile->next; continue; } #ifdef UNICODE if (!M2W((*match)->d_name, wstr)) file2 = NULL; else file2 = _wfopen(wstr, FILE_MODE_RO); #else file2 = fopen((*match)->d_name, FILE_MODE_RO); #endif if (!file2) { fclose(file1); curfile = curfile->next; continue; } if (confirmmatch(file1, file2, curfile->size)) { LOUD(fprintf(stderr, "MAIN: registering matched file pair\n")); registerpair(match, curfile, (ordertype == ORDER_TIME) ? sort_pairs_by_mtime : sort_pairs_by_filename); dupecount++; } DBG(else hash_fail++;) fclose(file1); fclose(file2); } skip_full_check: curfile = curfile->next; if (!ISFLAG(flags, F_HIDEPROGRESS)) update_progress(NULL, -1); progress++; } if (!ISFLAG(flags, F_HIDEPROGRESS)) fprintf(stderr, "\r%60s\r", " "); skip_file_scan: /* Stop catching CTRL+C */ signal(SIGINT, SIG_DFL); if (ISFLAG(flags, F_DELETEFILES)) { if (ISFLAG(flags, F_NOPROMPT)) deletefiles(files, 0, 0); else deletefiles(files, 1, stdin); } if (ISFLAG(flags, F_SUMMARIZEMATCHES)) summarizematches(files); #ifndef NO_SYMLINKS if (ISFLAG(flags, F_MAKESYMLINKS)) linkfiles(files, 0); #endif #ifndef NO_HARDLINKS if (ISFLAG(flags, F_HARDLINKFILES)) linkfiles(files, 1); #endif /* NO_HARDLINKS */ #ifdef ENABLE_BTRFS if (ISFLAG(flags, F_DEDUPEFILES)) dedupefiles(files); #endif /* ENABLE_BTRFS */ if (ISFLAG(flags, F_PRINTMATCHES)) printmatches(files); string_malloc_destroy(); #ifdef DEBUG if (ISFLAG(flags, F_DEBUG)) { fprintf(stderr, "\n%d partial (+%d small) -> %d full hash -> %d full (%d partial elim) (%d hash%u fail)\n", partial_hash, small_file, full_hash, partial_to_full, partial_elim, hash_fail, (unsigned int)sizeof(hash_t)*8); fprintf(stderr, "%" PRIuMAX " total files, %" PRIuMAX " comparisons, branch L %u, R %u, both %u, max 
tree depth %u\n", filecount, comparisons, left_branch, right_branch, left_branch + right_branch, max_depth); fprintf(stderr, "SMA: allocs %" PRIuMAX ", free %" PRIuMAX " (merge %" PRIuMAX ", repl %" PRIuMAX "), fail %" PRIuMAX ", reuse %" PRIuMAX ", scan %" PRIuMAX ", tails %" PRIuMAX "\n", sma_allocs, sma_free_good, sma_free_merged, sma_free_replaced, sma_free_ignored, sma_free_reclaimed, sma_free_scanned, sma_free_tails); fprintf(stderr, "I/O chunk size: %" PRIuMAX " KiB (%s)\n", (uintmax_t)(auto_chunk_size >> 10), (pci.l1 + pci.l1d) != 0 ? "dynamically sized" : "default size"); #ifdef ON_WINDOWS #ifndef NO_HARDLINKS if (ISFLAG(flags, F_HARDLINKFILES)) fprintf(stderr, "Exclusions based on Windows hard link limit: %u\n", hll_exclude); #endif #endif } #endif /* DEBUG */ exit(EXIT_SUCCESS); } jdupes-1.9/jdupes.h000066400000000000000000000153501321117252000143160ustar00rootroot00000000000000/* jdupes main program header * See jdupes.c for license information */ #ifndef JDUPES_H #define JDUPES_H #ifdef __cplusplus extern "C" { #endif /* Detect Windows and modify as needed */ #if defined _WIN32 || defined __CYGWIN__ #define ON_WINDOWS 1 #define NO_SYMLINKS 1 #define NO_PERMS 1 #define NO_SIGACTION 1 #ifndef WIN32_LEAN_AND_MEAN #define WIN32_LEAN_AND_MEAN #endif #include #include #include "win_stat.h" #define S_ISREG WS_ISREG #define S_ISDIR WS_ISDIR #endif /* Win32 */ #include #include #include #include #include "string_malloc.h" #include "jody_hash.h" #include "jody_sort.h" #include "version.h" /* Optional btrfs support */ #ifdef ENABLE_BTRFS #include #include #endif /* Some types are different on Windows */ #ifdef ON_WINDOWS typedef uint64_t jdupes_ino_t; typedef uint32_t jdupes_mode_t; extern const char dir_sep; #ifdef UNICODE extern const wchar_t *FILE_MODE_RO; #else extern const char *FILE_MODE_RO; #endif /* UNICODE */ #else /* Not Windows */ #include typedef ino_t jdupes_ino_t; typedef mode_t jdupes_mode_t; extern const char *FILE_MODE_RO; extern const char 
dir_sep; #ifdef UNICODE #error Do not define UNICODE on non-Windows platforms. #undef UNICODE #endif #endif /* _WIN32 || __CYGWIN__ */ /* Windows + Unicode compilation */ #ifdef UNICODE extern wchar_t wname[PATH_MAX]; extern wchar_t wname2[PATH_MAX]; extern wchar_t wstr[PATH_MAX]; extern int out_mode; extern int err_mode; #define M2W(a,b) MultiByteToWideChar(CP_UTF8, 0, a, -1, (LPWSTR)b, PATH_MAX) #define W2M(a,b) WideCharToMultiByte(CP_UTF8, 0, a, -1, (LPSTR)b, PATH_MAX, NULL, NULL) #endif /* UNICODE */ #ifndef NO_SYMLINKS #include "jody_paths.h" #endif #define ISFLAG(a,b) ((a & b) == b) #define SETFLAG(a,b) (a |= b) #define CLEARFLAG(a,b) (a &= (~b)) /* Low memory option overrides */ #ifdef LOW_MEMORY #undef USE_TREE_REBALANCE #ifndef NO_PERMS #define NO_PERMS 1 #endif #endif /* Aggressive verbosity for deep debugging */ #ifdef LOUD_DEBUG #ifndef DEBUG #define DEBUG #endif #define LOUD(...) if ISFLAG(flags, F_LOUD) __VA_ARGS__ #else #define LOUD(a) #endif /* Compile out debugging stat counters unless requested */ #ifdef DEBUG #define DBG(a) a #ifndef TREE_DEPTH_STATS #define TREE_DEPTH_STATS #endif #else #define DBG(a) #endif /* How many operations to wait before updating progress counters */ #define DELAY_COUNT 256 /* Behavior modification flags */ extern uint_fast32_t flags; #define F_RECURSE 0x00000001U #define F_HIDEPROGRESS 0x00000002U #define F_SOFTABORT 0x00000004U #define F_FOLLOWLINKS 0x00000008U #define F_DELETEFILES 0x00000010U #define F_INCLUDEEMPTY 0x00000020U #define F_CONSIDERHARDLINKS 0x00000040U #define F_SHOWSIZE 0x00000080U #define F_OMITFIRST 0x00000100U #define F_RECURSEAFTER 0x00000200U #define F_NOPROMPT 0x00000400U #define F_SUMMARIZEMATCHES 0x00000800U #define F_EXCLUDEHIDDEN 0x00001000U #define F_PERMISSIONS 0x00002000U #define F_HARDLINKFILES 0x00004000U #define F_EXCLUDESIZE 0x00008000U #define F_QUICKCOMPARE 0x00010000U #define F_USEPARAMORDER 0x00020000U #define F_DEDUPEFILES 0x00040000U #define F_REVERSESORT 0x00080000U #define 
F_ISOLATE 0x00100000U #define F_MAKESYMLINKS 0x00200000U #define F_PRINTMATCHES 0x00400000U #define F_ONEFS 0x00800000U #define F_LOUD 0x40000000U #define F_DEBUG 0x80000000U /* Per-file true/false flags */ #define F_VALID_STAT 0x00000001U #define F_HASH_PARTIAL 0x00000002U #define F_HASH_FULL 0x00000004U #define F_HAS_DUPES 0x00000008U #define F_IS_SYMLINK 0x00000010U typedef enum { ORDER_NAME = 0, ORDER_TIME } ordertype_t; #ifndef PARTIAL_HASH_SIZE #define PARTIAL_HASH_SIZE 4096 #endif /* Maximum path buffer size to use; must be large enough for a path plus * any work that might be done to the array it's stored in. PATH_MAX is * not always true. Read this article on the false promises of PATH_MAX: * http://insanecoding.blogspot.com/2007/11/pathmax-simply-isnt.html * Windows + Unicode needs a lot more space than UTF-8 in Linux/Mac OS X */ #ifndef PATHBUF_SIZE #define PATHBUF_SIZE 4096 #endif /* For interactive deletion input */ #define INPUT_SIZE 512 /* Per-file information */ typedef struct _file { struct _file *duplicates; struct _file *next; char *d_name; dev_t device; jdupes_mode_t mode; off_t size; jdupes_ino_t inode; hash_t filehash_partial; hash_t filehash; time_t mtime; uint32_t flags; /* Status flags */ unsigned int user_order; /* Order of the originating command-line parameter */ #ifndef NO_PERMS uid_t uid; gid_t gid; #endif #ifndef NO_HARDLINKS #ifndef ON_WINDOWS nlink_t nlink; #else uint32_t nlink; /* link count on Windows is always a DWORD */ #endif #endif } file_t; typedef struct _filetree { file_t *file; struct _filetree *left; struct _filetree *right; #ifdef USE_TREE_REBALANCE struct _filetree *parent; unsigned int left_weight; unsigned int right_weight; #endif /* USE_TREE_REBALANCE */ } filetree_t; /* This gets used in many functions */ #ifdef ON_WINDOWS extern struct winstat ws; #else extern struct stat s; #endif /* -X exclusion parameter stack */ struct exclude { struct exclude *next; unsigned int flags; int64_t size; char param[]; }; /* Exclude 
parameter flags */ #define X_DIR 0x00000001U #define X_SIZE_EQ 0x00000002U #define X_SIZE_GT 0x00000004U #define X_SIZE_LT 0x00000008U /* The X-than-or-equal are combination flags */ #define X_SIZE_GTEQ 0x00000006U #define X_SIZE_LTEQ 0x0000000aU /* Size specifier flags */ #define XX_EXCL_SIZE 0x0000000eU /* Flags that use numeric offset instead of a string */ #define XX_EXCL_OFFSET 0x0000000eU /* Flags that require a data parameter */ #define XX_EXCL_DATA 0x0000000fU /* Exclude definition array */ struct exclude_tags { const char * const tag; const uint32_t flags; }; extern const struct exclude_tags exclude_tags[]; extern struct exclude *exclude_head; /* Suffix definitions (treat as case-insensitive) */ struct size_suffix { const char * const suffix; const int64_t multiplier; }; extern const struct size_suffix size_suffix[]; extern void oom(const char * const restrict msg); extern void nullptr(const char * restrict func); extern int file_has_changed(file_t * const restrict file); extern int getfilestats(file_t * const restrict file); extern int getdirstats(const char * const restrict name, jdupes_ino_t * const restrict inode, dev_t * const restrict dev); extern int check_conditions(const file_t * const restrict file1, const file_t * const restrict file2); extern unsigned int get_max_dupes(const file_t *files, unsigned int * const restrict max, unsigned int * const restrict n_files); #ifdef __cplusplus } #endif #endif /* JDUPES_H */ jdupes-1.9/jody_cacheinfo.c000066400000000000000000000051411321117252000157600ustar00rootroot00000000000000/* Detect and report size of CPU caches * * Copyright (C) 2017 by Jody Bruchon * Distributed under The MIT License * * If an error occurs or a cache is missing, zeroes are returned * Unified caches populate l1/l2/l3; split caches populate lXi/lXd instead */ #include #include #include #include "jody_cacheinfo.h" static char *pathidx; static char buf[16]; static char path[64] = "/sys/devices/system/cpu/cpu0/cache/index"; /*** End 
declarations, begin code ***/ /* Linux sysfs */ #ifndef ON_WINDOWS static size_t read_procfile(const char * const restrict name) { FILE *fp; size_t i; if (name == NULL) return 0; memset(buf, 0, 16); /* Create path */ *pathidx = '\0'; strcpy(pathidx, name); fp = fopen(path, "rb"); if (fp == NULL) return 0; i = fread(buf, 1, 16, fp); if (ferror(fp)) return 0; fclose(fp); return i; } void get_proc_cacheinfo(struct proc_cacheinfo *pci) { char *idx; size_t i; size_t size; int level; char type; char index; if (pci == NULL) return; memset(pci, 0, sizeof(struct proc_cacheinfo)); i = strlen(path); if (i > 48) return; idx = path + i; pathidx = idx + 1; *pathidx = '/'; pathidx++; for (index = '0'; index < '9'; index++) { *idx = index; /* Get the level for this index */ if (read_procfile("level") == 0) break; if (*buf < '1' || *buf > '3') break; else level = (*buf) + 1 - '1'; /* Get the size */ if (read_procfile("size") == 0) break; size = (size_t)atoi(buf) * 1024; if (size == 0) break; /* Get the type */ if (read_procfile("type") == 0) break; if (*buf != 'U' && *buf != 'I' && *buf != 'D') break; type = *buf; /* Act on it */ switch (type) { case 'D': switch (level) { case 1: pci->l1d = size; break; case 2: pci->l2d = size; break; case 3: pci->l3d = size; break; default: return; }; break; case 'I': switch (level) { case 1: pci->l1i = size; break; case 2: pci->l2i = size; break; case 3: pci->l3i = size; break; default: return; }; break; case 'U': switch (level) { case 1: pci->l1 = size; break; case 2: pci->l2 = size; break; case 3: pci->l3 = size; break; default: return; }; break; default: return; } /* Continue to next index */ } return; } #else #define get_proc_cacheinfo(a) #endif /* ON_WINDOWS */ /* This is for testing only */ #if 0 int main(void) { static struct proc_cacheinfo pci; get_proc_cacheinfo(&pci); printf("Cache: L1 %d,%d,%d L2 %d,%d,%d L3 %d,%d,%d\n", pci.l1, pci.l1i, pci.l1d, pci.l2, pci.l2i, pci.l2d, pci.l3, pci.l3i, pci.l3d); return 0; } #endif 
jdupes-1.9/jody_cacheinfo.h000066400000000000000000000010271321117252000157640ustar00rootroot00000000000000/* Detect size of CPU data caches * See jody_cacheinfo.c for license information */ #ifndef JODY_CACHEINFO_H #define JODY_CACHEINFO_H #ifdef __cplusplus extern "C" { #endif /* Cache information structure * Split caches populate i/d, unified caches populate non-i/d */ struct proc_cacheinfo { size_t l1; size_t l1i; size_t l1d; size_t l2; size_t l2i; size_t l2d; size_t l3; size_t l3i; size_t l3d; }; extern void get_proc_cacheinfo(struct proc_cacheinfo *pci); #ifdef __cplusplus } #endif #endif /* JODY_CACHEINFO_H */ jdupes-1.9/jody_hash.c000066400000000000000000000066621321117252000147750ustar00rootroot00000000000000/* Jody Bruchon's fast hashing function * * This function was written to generate a fast hash that also has a * fairly low collision rate. The collision rate is much higher than * a secure hash algorithm, but the calculation is drastically simpler * and faster. * * Copyright (C) 2014-2017 by Jody Bruchon * Released under The MIT License */ #include #include #include "jody_hash.h" /* DO NOT modify the shift unless you know what you're doing. * This shift was decided upon after lots of testing and * changing it will likely cause lots of hash collisions. */ #ifndef JODY_HASH_SHIFT #define JODY_HASH_SHIFT 14 #endif /* The salt value's purpose is to cause each byte in the * hash_t word to have a positionally dependent variation. * It is injected into the calculation to prevent a string of * identical bytes from easily producing an identical hash. */ /* The tail mask table is used for block sizes that are * indivisible by the width of a hash_t. It is ANDed with the * final hash_t-sized element to zero out data in the buffer * that is not part of the data to be hashed. 
*/ /* Set hash parameters based on requested hash width */ #if JODY_HASH_WIDTH == 64 #define JODY_HASH_CONSTANT 0x1f3d5b79U static const hash_t tail_mask[] = { 0x0000000000000000, 0x00000000000000ff, 0x000000000000ffff, 0x0000000000ffffff, 0x00000000ffffffff, 0x000000ffffffffff, 0x0000ffffffffffff, 0x00ffffffffffffff, 0xffffffffffffffff }; #endif /* JODY_HASH_WIDTH == 64 */ #if JODY_HASH_WIDTH == 32 #define JODY_HASH_CONSTANT 0x1f3d5b79U static const hash_t tail_mask[] = { 0x00000000, 0x000000ff, 0x0000ffff, 0x00ffffff, 0xffffffff, }; #endif /* JODY_HASH_WIDTH == 32 */ #if JODY_HASH_WIDTH == 16 #define JODY_HASH_CONSTANT 0x1f5bU static const hash_t tail_mask[] = { 0x0000, 0x00ff, 0xffff, }; #endif /* JODY_HASH_WIDTH == 16 */ /* Hash a block of arbitrary size; must be divisible by sizeof(hash_t) * The first block should pass a start_hash of zero. * All blocks after the first should pass start_hash as the value * returned by the last call to this function. This allows hashing * of any amount of data. If data is not divisible by the size of * hash_t, it is MANDATORY that the caller provide a data buffer * which is divisible by sizeof(hash_t). 
*/ extern hash_t jody_block_hash(const hash_t * restrict data, const hash_t start_hash, const size_t count) { hash_t hash = start_hash; hash_t element; hash_t partial_salt; size_t len; /* Don't bother trying to hash a zero-length block */ if (count == 0) return hash; len = count / sizeof(hash_t); for (; len > 0; len--) { element = *data; hash += element; hash += JODY_HASH_CONSTANT; hash = (hash << JODY_HASH_SHIFT) | hash >> (sizeof(hash_t) * 8 - JODY_HASH_SHIFT); /* bit rotate left */ hash ^= element; hash = (hash << JODY_HASH_SHIFT) | hash >> (sizeof(hash_t) * 8 - JODY_HASH_SHIFT); hash ^= JODY_HASH_CONSTANT; hash += element; data++; } /* Handle data tail (for blocks indivisible by sizeof(hash_t)) */ len = count & (sizeof(hash_t) - 1); if (len) { partial_salt = JODY_HASH_CONSTANT & tail_mask[len]; element = *data & tail_mask[len]; hash += element; hash += partial_salt; hash = (hash << JODY_HASH_SHIFT) | hash >> (sizeof(hash_t) * 8 - JODY_HASH_SHIFT); hash ^= element; hash = (hash << JODY_HASH_SHIFT) | hash >> (sizeof(hash_t) * 8 - JODY_HASH_SHIFT); hash ^= partial_salt; hash += element; } return hash; } jdupes-1.9/jody_hash.h000066400000000000000000000014641321117252000147750ustar00rootroot00000000000000/* Jody Bruchon's fast hashing function (headers) * See jody_hash.c for license information */ #ifndef JODY_HASH_H #define JODY_HASH_H #ifdef __cplusplus extern "C" { #endif /* Required for uint64_t */ #include /* Width of a jody_hash. Changing this will also require * changing the width of tail masks to match. 
*/ #ifndef JODY_HASH_WIDTH #define JODY_HASH_WIDTH 64 #endif #if JODY_HASH_WIDTH == 64 typedef uint64_t hash_t; #endif #if JODY_HASH_WIDTH == 32 typedef uint32_t hash_t; #endif #if JODY_HASH_WIDTH == 16 typedef uint16_t hash_t; #endif /* Version increments when algorithm changes incompatibly */ #define JODY_HASH_VERSION 5 extern hash_t jody_block_hash(const hash_t * restrict data, const hash_t start_hash, const size_t count); #ifdef __cplusplus } #endif #endif /* JODY_HASH_H */ jdupes-1.9/jody_paths.c000066400000000000000000000105451321117252000151640ustar00rootroot00000000000000/* Jody Bruchon's path manipulation code library * * Copyright (C) 2014-2017 by Jody Bruchon * Released under The MIT License */ #include #include #include #include #include #include "jody_paths.h" /* Collapse dot-dot and single dot path components * This code MUST be passed a full file pathname (starting with '/') */ extern int collapse_dotdot(char * const path) { char *p; /* string copy input */ char *out; /* string copy output */ unsigned int i = 0; /* Fail if not passed an absolute path */ if (*path != '/') return -1; p = path; out = path; while (*p != '\0') { /* Abort if we're too close to the end of the buffer */ if (i >= (PATHBUF_SIZE - 3)) return -2; /* Skip repeated slashes */ while (*p == '/' && *(p + 1) == '/') { p++; i++; } /* Scan for '/./', '/..', '/.\0' combinations */ if (*p == '/' && *(p + 1) == '.' && (*(p + 2) == '.' || *(p + 2) == '/' || *(p + 2) == '\0')) { /* Check for '../' or terminal '..' */ if (*(p + 2) == '.' 
&& (*(p + 3) == '/' || *(p + 3) == '\0')) { /* Found a dot-dot; pull everything back to the previous directory */ p += 3; i += 3; /* If already at root, skip over the dot-dot */ if (i == 0) continue; /* Don't seek back past the first character */ if ((uintptr_t)out == (uintptr_t)path) continue; out--; while (*out != '/') out--; if (*p == '\0') break; continue; } else if (*(p + 2) == '/' || *(p + 2) == '\0') { /* Found a single dot; seek input ptr past it */ p += 2; i += 2; if (*p == '\0') break; continue; } /* Fall through: not a dot or dot-dot, just a slash */ } /* Copy all remaining text */ *out = *p; p++; out++; i++; } /* If only a root slash remains, be sure to keep it */ if ((uintptr_t)out == (uintptr_t)path) { *out = '/'; out++; } /* Output must always be terminated properly */ *out = '\0'; return 0; } /* Create a relative symbolic link path for a destination file */ extern int make_relative_link_name(const char * const src, const char * const dest, char * rel_path) { static char p1[PATHBUF_SIZE * 2], p2[PATHBUF_SIZE * 2]; static char *sp, *dp, *ss; if (!src || !dest) goto error_null_param; /* Get working directory path and prefix to pathnames if needed */ if (*src != '/' || *dest != '/') { if (!getcwd(p1, PATHBUF_SIZE * 2)) goto error_getcwd; *(p1 + (PATHBUF_SIZE * 2) - 1) = '\0'; strncat(p1, "/", PATHBUF_SIZE * 2); strncpy(p2, p1, PATHBUF_SIZE * 2); } /* If an absolute path is provided, use it as-is */ if (*src == '/') *p1 = '\0'; if (*dest == '/') *p2 = '\0'; /* Concatenate working directory to relative paths */ strncat(p1, src, PATHBUF_SIZE); strncat(p2, dest, PATHBUF_SIZE); /* Collapse . and .. 
path components */ if (collapse_dotdot(p1) != 0) goto error_cdd; if (collapse_dotdot(p2) != 0) goto error_cdd; /* Find where paths differ, remembering each slash along the way */ sp = p1; dp = p2; ss = p1; while (*sp == *dp && *sp != '\0' && *dp != '\0') { if (*sp == '/') ss = sp; sp++; dp++; } /* If paths are 100% identical then the files are the same file */ if (*sp == '\0' && *dp == '\0') return 1; /* Replace dirs in destination path with dot-dot */ while (*dp != '\0') { if (*dp == '/') { *rel_path++ = '.'; *rel_path++ = '.'; *rel_path++ = '/'; } dp++; } /* Copy the file name into rel_path and return */ ss++; while (*ss != '\0') *rel_path++ = *ss++; /* . and .. dirs at end are invalid */ if (*(rel_path - 1) == '.') if (*(rel_path - 2) == '/' || (*(rel_path - 2) == '.' && *(rel_path - 3) == '/')) goto error_dir_end; if (*(rel_path - 1) == '/') goto error_dir_end; *rel_path = '\0'; return 0; error_null_param: fprintf(stderr, "Internal error: get_relative_name has NULL parameter\n"); fprintf(stderr, "Report this as a serious bug to the author\n"); exit(EXIT_FAILURE); error_getcwd: fprintf(stderr, "error: couldn't get the current directory\n"); return -1; error_cdd: fprintf(stderr, "internal error: collapse_dotdot() call failed\n"); return -2; error_dir_end: fprintf(stderr, "internal error: get_relative_name() result has directory at end\n"); return -3; } jdupes-1.9/jody_paths.h000066400000000000000000000007111321117252000151630ustar00rootroot00000000000000/* Jody Bruchon's path manipulation code library * See jody_paths.c for license information */ #ifndef JODY_PATHS_H #define JODY_PATHS_H #ifdef __cplusplus extern "C" { #endif #ifndef PATHBUF_SIZE #define PATHBUF_SIZE 4096 #endif extern int collapse_dotdot(char * const path); extern int make_relative_link_name(const char * const src, const char * const dest, char * rel_path); #ifdef __cplusplus } #endif #endif /* JODY_PATHS_H */ 
jdupes-1.9/jody_sort.c000066400000000000000000000050361321117252000150330ustar00rootroot00000000000000/* Jody Bruchon's sorting code library * * Copyright (C) 2014-2017 by Jody Bruchon * Released under The MIT License */ #include #include "jody_sort.h" #define IS_NUM(a) (((a >= '0') && (a <= '9')) ? 1 : 0) extern int numeric_sort(const char * restrict c1, const char * restrict c2, int sort_direction) { int len1 = 0, len2 = 0; int precompare = 0; if (c1 == NULL || c2 == NULL) return -99; /* Numerically correct sort */ while (*c1 != '\0' && *c2 != '\0') { /* Reset string length counters */ len1 = 0; len2 = 0; /* Skip all sequences of zeroes */ while (*c1 == '0') { len1++; c1++; } while (*c2 == '0') { len2++; c2++; } /* If both chars are numeric, do a numeric comparison */ if (IS_NUM(*c1) && IS_NUM(*c2)) { precompare = 0; /* Scan numbers and get preliminary results */ while (IS_NUM(*c1) && IS_NUM(*c2)) { if (*c1 < *c2) precompare = -sort_direction; if (*c1 > *c2) precompare = sort_direction; len1++; len2++; c1++; c2++; /* Skip remaining digit pairs after any * difference is found */ if (precompare != 0) { while (IS_NUM(*c1) && IS_NUM(*c2)) { len1++; len2++; c1++; c2++; } break; } } /* One numeric and one non-numeric means the * numeric one is larger and sorts later */ if (IS_NUM(*c1) ^ IS_NUM(*c2)) { if (IS_NUM(*c1)) return sort_direction; else return -sort_direction; } /* If the last test fell through, numbers are * of equal length. Use the precompare result * as the result for this number comparison. */ if (precompare != 0) return precompare; } /* Do normal comparison */ if (*c1 == *c2 && *c1 != '\0' && *c2 != '\0') { c1++; c2++; len1++; len2++; /* Put symbols and spaces after everything else */ } else if (*c2 < '.' && *c1 >= '.') return -sort_direction; else if (*c1 < '.' 
&& *c2 >= '.') return sort_direction; /* Normal strcmp() style compare */ else if (*c1 > *c2) return sort_direction; else return -sort_direction; } /* Longer strings generally sort later */ if (len1 < len2) return -sort_direction; if (len1 > len2) return sort_direction; /* Normal strcmp() style comparison */ if (*c1 == '\0' && *c2 != '\0') return -sort_direction; if (*c1 != '\0' && *c2 == '\0') return sort_direction; /* Fall through: the strings are equal */ return 0; } jdupes-1.9/jody_sort.h000066400000000000000000000005171321117252000150370ustar00rootroot00000000000000/* Jody Bruchon's sorting code library * See jody_sort.c for license information */ #ifndef JODY_SORT_H #define JODY_SORT_H #ifdef __cplusplus extern "C" { #endif extern int numeric_sort(const char * restrict c1, const char * restrict c2, int sort_direction); #ifdef __cplusplus } #endif #endif /* JODY_SORT_H */ jdupes-1.9/jody_win_unicode.c000066400000000000000000000036461321117252000163540ustar00rootroot00000000000000/* Jody Bruchon's Windows Unicode helper routines * * Copyright (C) 2014-2017 by Jody Bruchon * Released under The MIT License */ #include "jdupes.h" #ifdef UNICODE #include #include #include /* Convert slashes to backslashes in a file path */ extern void slash_convert(char *path) { while (*path != '\0') { if (*path == '/') *path = '\\'; path++; } return; } /* Copy Windows wide character arguments to UTF-8 */ extern void widearg_to_argv(int argc, wchar_t **wargv, char **argv) { static char temp[PATH_MAX]; int len; if (!argv) goto error_bad_argv; for (int counter = 0; counter < argc; counter++) { len = W2M(wargv[counter], &temp); if (len < 1) goto error_wc2mb; argv[counter] = (char *)string_malloc((size_t)len + 1); if (!argv[counter]) oom("widearg_to_argv()"); strncpy(argv[counter], temp, (size_t)len + 1); } return; error_bad_argv: fprintf(stderr, "fatal: bad argv pointer\n"); exit(EXIT_FAILURE); error_wc2mb: fprintf(stderr, "fatal: WideCharToMultiByte failed\n"); exit(EXIT_FAILURE); } 
/* Print a string that is wide on Windows but normal on POSIX */ extern int fwprint(FILE * const restrict stream, const char * const restrict str, const int cr) { int retval; int stream_mode = out_mode; if (stream == stderr) stream_mode = err_mode; if (stream_mode == _O_U16TEXT) { /* Convert to wide string and send to wide console output */ if (!MultiByteToWideChar(CP_UTF8, 0, str, -1, (LPWSTR)wstr, PATH_MAX)) return -1; fflush(stream); _setmode(_fileno(stream), stream_mode); retval = fwprintf(stream, L"%S%S", wstr, cr ? L"\n" : L""); fflush(stream); _setmode(_fileno(stream), _O_TEXT); return retval; } else { return fprintf(stream, "%s%s", str, cr ? "\n" : ""); } } #else #define fwprint(a,b,c) fprintf(a, "%s%s", b, c ? "\n" : "") #define slash_convert(a) #endif /* UNICODE */ jdupes-1.9/jody_win_unicode.h000066400000000000000000000011501321117252000163450ustar00rootroot00000000000000/* Jody Bruchon's Windows Unicode helper routines * See jody_win_unicode.c for license information */ #ifndef JODY_WIN_UNICODE_H #define JODY_WIN_UNICODE_H #ifdef __cplusplus extern "C" { #endif #include "jdupes.h" #ifdef UNICODE extern void slash_convert(char *path); extern void widearg_to_argv(int argc, wchar_t **wargv, char **argv); extern int fwprint(FILE * const restrict stream, const char * const restrict str, const int cr); #else #define fwprint(a,b,c) fprintf(a, "%s%s", b, c ? "\n" : "") #define slash_convert(a) #endif /* UNICODE */ #ifdef __cplusplus } #endif #endif /* JODY_WIN_UNICODE_H */ jdupes-1.9/string_malloc.c000066400000000000000000000170251321117252000156550ustar00rootroot00000000000000/* * String table allocator * A replacement for malloc() for tables of fixed strings * * Copyright (C) 2015-2017 by Jody Bruchon * Released under The MIT License */ #include #include #include "string_malloc.h" /* Size of pages to allocate at once. Must be divisible by uintptr_t. * The maximum object size is this page size minus about 16 bytes! 
*/ #ifndef SMA_PAGE_SIZE #define SMA_PAGE_SIZE 262144 #endif /* Max freed pointers to remember. Increasing this number allows storing * more free objects but can slow down allocations. Don't increase it if * the program's total reused freed alloc counter doesn't increase as a * result or you're slowing allocs down to no benefit. */ #ifndef SMA_MAX_FREE #define SMA_MAX_FREE 32 #endif #ifdef DEBUG uintmax_t sma_allocs = 0; uintmax_t sma_free_ignored = 0; uintmax_t sma_free_good = 0; uintmax_t sma_free_merged = 0; uintmax_t sma_free_replaced = 0; uintmax_t sma_free_reclaimed = 0; uintmax_t sma_free_scanned = 0; uintmax_t sma_free_tails = 0; #define DBG(a) a #else #define DBG(a) #endif /* This is used to bypass string_malloc for debugging */ #ifdef SMA_PASSTHROUGH void *string_malloc(size_t len) { return malloc(len); } void string_free(void *ptr) { free(ptr); return; } void string_malloc_destroy(void) { return; } #else /* Not SMA_PASSTHROUGH mode */ struct freelist { void *addr; size_t size; }; static void *sma_head = NULL; static uintptr_t *sma_curpage = NULL; static unsigned int sma_pages = 0; static struct freelist sma_freelist[SMA_MAX_FREE]; static int sma_freelist_cnt = 0; static size_t sma_nextfree = sizeof(uintptr_t); /* Scan the freed chunk list for a suitably sized object */ static inline void *scan_freelist(const size_t size) { size_t *object, *min_p; size_t sz, min = 0; int i, used = 0, min_i = -1; /* Don't bother scanning if the list is empty */ if (sma_freelist_cnt == 0) return NULL; for (i = 0; i < SMA_MAX_FREE; i++) { /* Stop scanning once we run out of valid entries */ if (used == sma_freelist_cnt) return NULL; DBG(sma_free_scanned++;) object = sma_freelist[i].addr; /* Skip empty entries */ if (object == NULL) continue; sz = sma_freelist[i].size; used++; /* Skip smaller objects */ if (sz < size) continue; /* Object is big enough; record if it's the new minimum */ if (min == 0 || sz <= min) { min = sz; min_i = i; /* Always stop scanning if exact sized 
object found */ if (sz == size) break; } } /* Enhancement TODO: split the free item if it's big enough */ /* Return smallest object found and delete from free list */ if (min_i != -1) { min_p = sma_freelist[min_i].addr; sma_freelist[min_i].addr = NULL; sma_freelist_cnt--; min_p++; return (void *)min_p; } /* Fall through - free list search failed */ return NULL; } /* malloc() a new page for string_malloc to use */ static inline void *string_malloc_page(void) { uintptr_t * restrict pageptr; /* Allocate page and set up pointers at page starts */ pageptr = (uintptr_t *)malloc(SMA_PAGE_SIZE); if (pageptr == NULL) return NULL; *pageptr = (uintptr_t)NULL; /* Link previous page to this page, if applicable */ if (sma_curpage != NULL) *sma_curpage = (uintptr_t)pageptr; /* Update last page pointers and total page counter */ sma_curpage = pageptr; sma_pages++; return (void *)pageptr; } void *string_malloc(size_t len) { const void * restrict page = (char *)sma_curpage; static size_t *address; /* Calling with no actual length is invalid */ if (len < 1) return NULL; /* Align objects where possible */ if (len & (sizeof(uintptr_t) - 1)) { len &= ~(sizeof(uintptr_t) - 1); len += sizeof(uintptr_t); } /* Pass-through allocations larger than maximum object size to malloc() */ if (len > (SMA_PAGE_SIZE - sizeof(uintptr_t) - sizeof(size_t))) { /* Allocate the space */ address = (size_t *)malloc(len + sizeof(size_t)); if (!address) return NULL; /* Prefix object with its size */ *address = len; address++; DBG(sma_allocs++;) return (void *)address; } /* Initialize on first use */ if (sma_pages == 0) { /* Initialize the freed object list */ for (int i = 0; i < SMA_MAX_FREE; i++) sma_freelist[i].addr = NULL; /* Allocate first page and set up for first allocation */ sma_head = string_malloc_page(); if (sma_head == NULL) return NULL; sma_nextfree = sizeof(uintptr_t); page = sma_head; } /* Allocate objects from the free list first */ address = (size_t *)scan_freelist(len); if (address != NULL) { 
DBG(sma_free_reclaimed++;) return (void *)address; } /* Allocate new page if this object won't fit */ if ((sma_nextfree + len + sizeof(size_t)) > SMA_PAGE_SIZE) { size_t sz; size_t *tailaddr; /* See if page tail has usable remaining capacity */ sz = sma_nextfree + sizeof(size_t) + sizeof(uintptr_t); /* Try to add page tail to free list rather than waste it */ if (sz <= SMA_PAGE_SIZE) { sz = SMA_PAGE_SIZE - sma_nextfree - sizeof(size_t); tailaddr = (size_t *)((uintptr_t)page + sma_nextfree); *tailaddr = (size_t)sz; tailaddr++; string_free(tailaddr); DBG(sma_free_tails++;) } page = string_malloc_page(); if (!page) return NULL; sma_nextfree = sizeof(uintptr_t); } /* Allocate the space */ address = (size_t *)((uintptr_t)page + sma_nextfree); /* Prefix object with its size */ *address = len; address++; sma_nextfree += len + sizeof(size_t); DBG(sma_allocs++;) return (void *)address; } /* Free an object, adding to free list if possible */ void string_free(void * const restrict addr) { int freefull = 0; struct freelist *emptyslot = NULL; static uintptr_t before, after; static size_t * restrict sizeptr; static size_t size; /* Do nothing on NULL address */ if (addr == NULL) goto sf_failed; /* Get address to real start of object and the object size */ sizeptr = (size_t *)addr - 1; size = *(size_t *)sizeptr; /* Calculate after-block pointer for merge checks */ after = (uintptr_t)addr + size; /* If free list is full, try to replace a smaller object */ if (sma_freelist_cnt == SMA_MAX_FREE) freefull = 1; /* Attempt to merge into other free objects */ for (int i = 0; i < SMA_MAX_FREE; i++) { /* Record first empty slot */ if (emptyslot == NULL && sma_freelist[i].addr == NULL) { emptyslot = &(sma_freelist[i]); // break; } else if (freefull != 0 && sma_freelist[i].size < size) { /* Replace object if list is full and new one is bigger */ emptyslot = &(sma_freelist[i]); DBG(sma_free_replaced++;) break; } else if ((uintptr_t)(sma_freelist[i].addr) == after) { /* Merge with a block after 
this one */ sma_freelist[i].addr = sizeptr; sma_freelist[i].size += (size + sizeof(size_t *)); DBG(sma_free_good++;) DBG(sma_free_merged++;) return; } else { before = (uintptr_t)addr + size; if (before == (uintptr_t)(sma_freelist[i].addr)) { /* Merge with a block before this one */ sma_freelist[i].size += (size + sizeof(size_t *)); DBG(sma_free_good++;) DBG(sma_free_merged++;) } } } /* Merges failed; add to empty slot (if any found) */ if (emptyslot != NULL) { if (emptyslot->addr == NULL) sma_freelist_cnt++; emptyslot->addr = sizeptr; emptyslot->size = size; DBG(sma_free_good++;) return; } /* Fall through */ sf_failed: DBG(sma_free_ignored++;) return; } /* Destroy all allocated pages */ void string_malloc_destroy(void) { uintptr_t *cur; uintptr_t *next; cur = sma_head; if (sma_head == NULL) return; while (sma_pages > 0) { next = (uintptr_t *)*cur; free(cur); cur = next; sma_pages--; } sma_head = NULL; return; } #endif /* SMA_PASSTHROUGH */ jdupes-1.9/string_malloc.h000066400000000000000000000013051321117252000156540ustar00rootroot00000000000000/* String table allocator * A replacement for malloc() for tables of fixed strings * See string_malloc.c for license information */ #ifndef STRING_MALLOC_H #define STRING_MALLOC_H #ifdef __cplusplus extern "C" { #endif #ifdef DEBUG extern uintmax_t sma_allocs; extern uintmax_t sma_free_ignored; extern uintmax_t sma_free_good; extern uintmax_t sma_free_merged; extern uintmax_t sma_free_replaced; extern uintmax_t sma_free_scanned; extern uintmax_t sma_free_reclaimed; extern uintmax_t sma_free_tails; #endif extern void *string_malloc(size_t len); extern void string_free(void * const restrict addr); extern void string_malloc_destroy(void); #ifdef __cplusplus } #endif #endif /* STRING_MALLOC_H */ 
jdupes-1.9/testdir/000077500000000000000000000000001321117252000143255ustar00rootroot00000000000000jdupes-1.9/testdir/.hidden_dir/000077500000000000000000000000001321117252000164745ustar00rootroot00000000000000jdupes-1.9/testdir/.hidden_dir/hiddendir_two000066400000000000000000000000041321117252000212340ustar00rootroot00000000000000two jdupes-1.9/testdir/.hidden_two000066400000000000000000000000041321117252000164440ustar00rootroot00000000000000two jdupes-1.9/testdir/block_size_tests/000077500000000000000000000000001321117252000176735ustar00rootroot00000000000000jdupes-1.9/testdir/block_size_tests/4095b_file1000066400000000000000000000077771321117252000214630ustar00rootroot00000000000000This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that wojdupes-1.9/testdir/block_size_tests/4095b_file2000066400000000000000000000077771321117252000214640ustar00rootroot00000000000000This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that wojdupes-1.9/testdir/block_size_tests/4096b_file1000066400000000000000000000100001321117252000214310ustar00rootroot00000000000000This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that worjdupes-1.9/testdir/block_size_tests/4096b_file2000066400000000000000000000100001321117252000214320ustar00rootroot00000000000000This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! 
This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! 
This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that worjdupes-1.9/testdir/block_size_tests/4097b_file1000066400000000000000000000100011321117252000214330ustar00rootroot00000000000000This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that workjdupes-1.9/testdir/block_size_tests/4097b_file2000066400000000000000000000100011321117252000214340ustar00rootroot00000000000000This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that workjdupes-1.9/testdir/japanese_ありがとう/000077500000000000000000000000001321117252000231455ustar00rootroot00000000000000jdupes-1.9/testdir/japanese_ありがとう/怖い000066400000000000000000000001061321117252000251100ustar00rootroot00000000000000oh hi, this file has a Japanese name for testing this program against!jdupes-1.9/testdir/japanese_ありがとう/美000066400000000000000000000001061321117252000242270ustar00rootroot00000000000000oh hi, this file has a Japanese name for testing this program against!jdupes-1.9/testdir/larger_file_1000066400000000000000000002023601321117252000167460ustar00rootroot00000000000000This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! 
jdupes-1.9/testdir/larger_file_2000066400000000000000000002023601321117252000167470ustar00rootroot00000000000000This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! jdupes-1.9/testdir/larger_file_3000066400000000000000000002023601321117252000167500ustar00rootroot00000000000000Unlike the other large files, this one is intended to fail matching early. This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. 
Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. 
If you don't like that, write your own duplicate scanner progr jdupes-1.9/testdir/larger_file_4000066400000000000000000002023601321117252000167510ustar00rootroot00000000000000This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. 
Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner program! :-P If you'll excuse me, I have to copy-paste like crazy now. Have fun! This file is a larger file than the other testdir files. Its purpose is to trigger code that works with files larger than the quick hash block size. Since I did not feel like typing out thousands of lines of text, this long line will be duplicated ad infinitum. If you don't like that, write your own duplicate scanner prog Unlike the other large files, this one is designed to fail matching later. jdupes-1.9/testdir/nine_upsidedown000066400000000000000000000000041321117252000174340ustar00rootroot00000000000000six jdupes-1.9/testdir/notsotinydupe1000066400000000000000000000001021321117252000172460ustar00rootroot00000000000000This is not quite such a small duplicate as the other duplicates. jdupes-1.9/testdir/notsotinydupe2000066400000000000000000000001021321117252000172470ustar00rootroot00000000000000This is not quite such a small duplicate as the other duplicates. 
jdupes-1.9/testdir/numeric_sort/000077500000000000000000000000001321117252000170365ustar00rootroot00000000000000jdupes-1.9/testdir/numeric_sort/file001000066400000000000000000000000041321117252000201130ustar00rootroot00000000000000foo jdupes-1.9/testdir/numeric_sort/file001a000066400000000000000000000000041321117252000202540ustar00rootroot00000000000000foo jdupes-1.9/testdir/numeric_sort/file002000066400000000000000000000000041321117252000201140ustar00rootroot00000000000000foo jdupes-1.9/testdir/numeric_sort/file020000066400000000000000000000000041321117252000201140ustar00rootroot00000000000000foo jdupes-1.9/testdir/numeric_sort/file021000066400000000000000000000000041321117252000201150ustar00rootroot00000000000000foo jdupes-1.9/testdir/numeric_sort/file030000066400000000000000000000000041321117252000201150ustar00rootroot00000000000000foo jdupes-1.9/testdir/numeric_sort/file1000066400000000000000000000000041321117252000177530ustar00rootroot00000000000000foo jdupes-1.9/testdir/numeric_sort/file10000066400000000000000000000000041321117252000200330ustar00rootroot00000000000000foo jdupes-1.9/testdir/numeric_sort/file100000066400000000000000000000000041321117252000201130ustar00rootroot00000000000000foo jdupes-1.9/testdir/numeric_sort/file10a000066400000000000000000000000041321117252000201740ustar00rootroot00000000000000foo jdupes-1.9/testdir/numeric_sort/file1a2000066400000000000000000000000041321117252000201760ustar00rootroot00000000000000foo jdupes-1.9/testdir/numeric_sort/file2000066400000000000000000000000041321117252000177540ustar00rootroot00000000000000foo jdupes-1.9/testdir/numeric_sort/file3000066400000000000000000000000041321117252000177550ustar00rootroot00000000000000foo jdupes-1.9/testdir/numeric_sort_2/000077500000000000000000000000001321117252000172575ustar00rootroot00000000000000jdupes-1.9/testdir/numeric_sort_2/file1-0 (1).jpg000066400000000000000000000000041321117252000213520ustar00rootroot00000000000000bar 
jdupes-1.9/testdir/numeric_sort_2/file1-0#1.jpg000066400000000000000000000000041321117252000212340ustar00rootroot00000000000000bar jdupes-1.9/testdir/numeric_sort_2/file1-0.jpg000066400000000000000000000000041321117252000211100ustar00rootroot00000000000000bar jdupes-1.9/testdir/numeric_sort_2/file1-1.jpg000066400000000000000000000000041321117252000211110ustar00rootroot00000000000000bar jdupes-1.9/testdir/numeric_sort_2/file1-10.jpg000066400000000000000000000000041321117252000211710ustar00rootroot00000000000000bar jdupes-1.9/testdir/numeric_sort_2/file1-2.jpg000066400000000000000000000000041321117252000211120ustar00rootroot00000000000000bar jdupes-1.9/testdir/recursed_a/000077500000000000000000000000001321117252000164415ustar00rootroot00000000000000jdupes-1.9/testdir/recursed_a/five000066400000000000000000000000051321117252000173100ustar00rootroot00000000000000five jdupes-1.9/testdir/recursed_a/five_2000066400000000000000000000000051321117252000175310ustar00rootroot00000000000000five jdupes-1.9/testdir/recursed_a/one000066400000000000000000000000041321117252000171370ustar00rootroot00000000000000one jdupes-1.9/testdir/recursed_a/one_2000066400000000000000000000000041321117252000173600ustar00rootroot00000000000000one jdupes-1.9/testdir/recursed_a/symlink_infinite_loop000077700000000000000000000000001321117252000252352../recursed_austar00rootroot00000000000000jdupes-1.9/testdir/recursed_a/two000066400000000000000000000000041321117252000171670ustar00rootroot00000000000000two jdupes-1.9/testdir/recursed_a/two_2000066400000000000000000000000041321117252000174100ustar00rootroot00000000000000two jdupes-1.9/testdir/recursed_b/000077500000000000000000000000001321117252000164425ustar00rootroot00000000000000jdupes-1.9/testdir/recursed_b/four000066400000000000000000000000051321117252000173330ustar00rootroot00000000000000four jdupes-1.9/testdir/recursed_b/one000066400000000000000000000000041321117252000171400ustar00rootroot00000000000000one 
jdupes-1.9/testdir/recursed_b/three000066400000000000000000000000061321117252000174700ustar00rootroot00000000000000three jdupes-1.9/testdir/recursed_b/two_plus_one000066400000000000000000000000061321117252000210760ustar00rootroot00000000000000three jdupes-1.9/testdir/recursed_c/000077500000000000000000000000001321117252000164435ustar00rootroot00000000000000jdupes-1.9/testdir/recursed_c/five000066400000000000000000000000051321117252000173120ustar00rootroot00000000000000five jdupes-1.9/testdir/recursed_c/level2/000077500000000000000000000000001321117252000176345ustar00rootroot00000000000000jdupes-1.9/testdir/recursed_c/level2/five000066400000000000000000000000051321117252000205030ustar00rootroot00000000000000five jdupes-1.9/testdir/recursed_c/level2/one000066400000000000000000000000041321117252000203320ustar00rootroot00000000000000one jdupes-1.9/testdir/recursed_c/level2/two000066400000000000000000000000041321117252000203620ustar00rootroot00000000000000two jdupes-1.9/testdir/recursed_c/one000066400000000000000000000000041321117252000171410ustar00rootroot00000000000000one jdupes-1.9/testdir/recursed_c/two000066400000000000000000000000041321117252000171710ustar00rootroot00000000000000two jdupes-1.9/testdir/symlink_dir000077700000000000000000000000001321117252000206262recursed_austar00rootroot00000000000000jdupes-1.9/testdir/symlink_test/000077500000000000000000000000001321117252000170525ustar00rootroot00000000000000jdupes-1.9/testdir/symlink_test/regular_file000066400000000000000000000000221321117252000214270ustar00rootroot00000000000000symlink test file 
jdupes-1.9/testdir/symlink_test/symlinked_file000077700000000000000000000000001321117252000243512regular_fileustar00rootroot00000000000000jdupes-1.9/testdir/symlink_twice_one000077700000000000000000000000001321117252000205212twoustar00rootroot00000000000000jdupes-1.9/testdir/symlink_two000077700000000000000000000000001321117252000173562twoustar00rootroot00000000000000jdupes-1.9/testdir/tinydupe1000066400000000000000000000000011321117252000161610ustar00rootroot00000000000000 jdupes-1.9/testdir/tinydupe2000066400000000000000000000000011321117252000161620ustar00rootroot00000000000000 jdupes-1.9/testdir/twice_one000066400000000000000000000000041321117252000162160ustar00rootroot00000000000000two jdupes-1.9/testdir/two000066400000000000000000000000041321117252000150530ustar00rootroot00000000000000two jdupes-1.9/testdir/unicode_filenames/000077500000000000000000000000001321117252000177765ustar00rootroot00000000000000jdupes-1.9/testdir/unicode_filenames/Ελληνιά000066400000000000000000000001061321117252000252030ustar00rootroot00000000000000oh hi, this file has a Japanese name for testing this program against!jdupes-1.9/testdir/unicode_filenames/怖い000066400000000000000000000001061321117252000217410ustar00rootroot00000000000000oh hi, this file has a Japanese name for testing this program against!jdupes-1.9/testdir/unicode_filenames/美000066400000000000000000000001061321117252000210600ustar00rootroot00000000000000oh hi, this file has a Japanese name for testing this program against!jdupes-1.9/testdir/unicode_filenames/행운을 빈다000066400000000000000000000001061321117252000252660ustar00rootroot00000000000000oh hi, this file has a Japanese name for testing this program against!jdupes-1.9/testdir/with spaces a000066400000000000000000000000141321117252000166560ustar00rootroot00000000000000with spaces jdupes-1.9/testdir/with spaces b000066400000000000000000000000141321117252000166570ustar00rootroot00000000000000with spaces 
jdupes-1.9/testdir/zero_a000066400000000000000000000000001321117252000155150ustar00rootroot00000000000000jdupes-1.9/testdir/zero_b000066400000000000000000000000001321117252000155160ustar00rootroot00000000000000jdupes-1.9/version.h000066400000000000000000000003751321117252000145120ustar00rootroot00000000000000/* VERSION determines the program's version number * This file is part of jdupes; see jdupes.c for license information */ #ifndef JDUPES_VERSION_H #define JDUPES_VERSION_H #define VER "1.9" #define VERDATE "2017-12-03" #endif /* JDUPES_VERSION_H */ jdupes-1.9/win_stat.c000066400000000000000000000043321321117252000146450ustar00rootroot00000000000000/* * Windows-native code for getting stat()-like information * * Copyright (C) 2017 by Jody Bruchon * Released under The MIT License */ #ifndef WIN32_LEAN_AND_MEAN #define WIN32_LEAN_AND_MEAN #endif #include #include #include "win_stat.h" #include /* Convert NT epoch to UNIX epoch */ static time_t nttime_to_unixtime(const uint64_t * const restrict timestamp) { uint64_t newstamp; memcpy(&newstamp, timestamp, sizeof(uint64_t)); newstamp /= 10000000LL; if (newstamp <= 11644473600LL) return 0; newstamp -= 11644473600LL; return newstamp; } /* Get stat()-like extra information for a file on Windows */ int win_stat(const char * const filename, struct winstat * const restrict buf) { HANDLE hFile; BY_HANDLE_FILE_INFORMATION bhfi; uint64_t timetemp; #ifdef UNICODE static wchar_t wname[PATH_MAX]; if (!buf) return -127; if (!MultiByteToWideChar(CP_UTF8, 0, filename, -1, wname, PATH_MAX)) return -126; hFile = CreateFileW(wname, 0, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); #else if (!buf) return -127; hFile = CreateFile(filename, 0, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); #endif if (hFile == INVALID_HANDLE_VALUE) goto failure; if (!GetFileInformationByHandle(hFile, &bhfi)) goto failure2; buf->inode = ((uint64_t)(bhfi.nFileIndexHigh) << 32) + 
(uint64_t)bhfi.nFileIndexLow; buf->size = ((uint64_t)(bhfi.nFileSizeHigh) << 32) + (uint64_t)bhfi.nFileSizeLow; timetemp = ((uint64_t)(bhfi.ftCreationTime.dwHighDateTime) << 32) + bhfi.ftCreationTime.dwLowDateTime; buf->ctime = nttime_to_unixtime(&timetemp); timetemp = ((uint64_t)(bhfi.ftLastWriteTime.dwHighDateTime) << 32) + bhfi.ftLastWriteTime.dwLowDateTime; buf->mtime = nttime_to_unixtime(&timetemp); timetemp = ((uint64_t)(bhfi.ftLastAccessTime.dwHighDateTime) << 32) + bhfi.ftLastAccessTime.dwLowDateTime; buf->atime = nttime_to_unixtime(&timetemp); buf->device = (uint32_t)bhfi.dwVolumeSerialNumber; buf->nlink = (uint32_t)bhfi.nNumberOfLinks; buf->mode = (uint32_t)bhfi.dwFileAttributes; CloseHandle(hFile); return 0; failure: CloseHandle(hFile); return -1; failure2: CloseHandle(hFile); return -2; } jdupes-1.9/win_stat.h000066400000000000000000000025651321117252000146600ustar00rootroot00000000000000/* Windows-native routines for getting stat()-like information * See win_stat.c for license information */ #ifndef WIN_STAT_H #define WIN_STAT_H #ifdef __cplusplus extern "C" { #endif #ifndef WIN32_LEAN_AND_MEAN #define WIN32_LEAN_AND_MAN #endif #include #include struct winstat { uint64_t inode; int64_t size; uint32_t device; uint32_t nlink; uint32_t mode; time_t ctime; time_t mtime; time_t atime; }; /* stat()-like macros for Windows "mode" flags (file attributes) */ #define WS_ISARCHIVE(mode) ((mode & FILE_ATTRIBUTE_ARCHIVE) ? 1 : 0) #define WS_ISRO(mode) ((mode & FILE_ATTRIBUTE_READONLY) ? 1 : 0) #define WS_ISHIDDEN(mode) ((mode & FILE_ATTRIBUTE_HIDDEN) ? 1 : 0) #define WS_ISSYSTEM(mode) ((mode & FILE_ATTRIBUTE_SYSTEM) ? 1 : 0) #define WS_ISCRYPT(mode) ((mode & FILE_ATTRIBUTE_ENCRYPTED) ? 1 : 0) #define WS_ISDIR(mode) ((mode & FILE_ATTRIBUTE_DIRECTORY) ? 1 : 0) #define WS_ISCOMPR(mode) ((mode & FILE_ATTRIBUTE_COMPRESSED) ? 1 : 0) #define WS_ISREPARSE(mode) ((mode & FILE_ATTRIBUTE_REPARSE) ? 1 : 0) #define WS_ISSPARSE(mode) ((mode & FILE_ATTRIBUTE_SPARSE) ? 
1 : 0) #define WS_ISTEMP(mode) ((mode & FILE_ATTRIBUTE_TEMPORARY) ? 1 : 0) #define WS_ISREG(mode) ((mode & FILE_ATTRIBUTE_DIRECTORY) ? 0 : 1) extern int win_stat(const char * const filename, struct winstat * const restrict buf); #ifdef __cplusplus } #endif #endif /* WIN_STAT_H */